Compare commits
24 Commits
sketch-on-
...
refactor-l

SHA1
2363010cbc
bd17bc98d5
e7a2824cb2
f77ed3d790
bc5fdbad43
053bdffc45
9ffc08b84a
47e8d3f4fc
53db421d97
289ed291c4
39ceb83840
af449ff6ca
e1bf55cc4a
8fe2d33063
d7e36eed24
6123ed6a82
da6cd5cf9f
ad5bfa1a29
6358bdd7cd
d6fe414b2e
25e7e8cb89
a175870453
24516cdb2d
496398de52
.github/workflows/ci.yml (vendored, 4 changes)

@@ -367,7 +367,7 @@ jobs:
 export VITE_KC_API_BASE_URL
 xvfb-run yarn test:e2e:tauri
 env:
-E2E_APPLICATION: "./src-tauri/target/${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}/zoo-modeling-app"
+E2E_APPLICATION: "./src-tauri/target/${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}/app"
 KITTYCAD_API_TOKEN: ${{ env.BUILD_RELEASE == 'true' && secrets.KITTYCAD_API_TOKEN || secrets.KITTYCAD_API_TOKEN_DEV }}

 - name: Run e2e tests (windows only)
@@ -376,7 +376,7 @@ jobs:
 cargo install tauri-driver --force
 yarn wdio run wdio.conf.ts
 env:
-E2E_APPLICATION: ".\\src-tauri\\target\\${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}\\Zoo Modeling App.exe"
+E2E_APPLICATION: ".\\src-tauri\\target\\${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}\\app.exe"
 KITTYCAD_API_TOKEN: ${{ env.BUILD_RELEASE == 'true' && secrets.KITTYCAD_API_TOKEN || secrets.KITTYCAD_API_TOKEN_DEV }}
 VITE_KC_API_BASE_URL: ${{ env.BUILD_RELEASE == 'true' && 'https://api.zoo.dev' || 'https://api.dev.zoo.dev' }}
 E2E_TAURI_ENABLED: true
.github/workflows/playwright.yml (vendored, 43 changes)

@@ -90,7 +90,10 @@ jobs:
 - name: build web
 run: yarn build:local
 - name: Run ubuntu/chrome snapshots
-run: yarn playwright test --project="Google Chrome" --update-snapshots e2e/playwright/snapshot-tests.spec.ts
+run: |
+yarn playwright test --project="Google Chrome" --update-snapshots e2e/playwright/snapshot-tests.spec.ts
+# remove test-results, messes with retry logic
+rm -r test-results
 env:
 CI: true
 token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
@@ -121,7 +124,7 @@ jobs:
 - uses: actions/upload-artifact@v4
 if: steps.git-check.outputs.modified == 'true'
 with:
-name: playwright-report
+name: playwright-report-ubuntu
 path: playwright-report/
 retention-days: 30
 # if have previous run results, use them
@@ -129,16 +132,18 @@ jobs:
 if: always()
 continue-on-error: true
 with:
-name: test-results
+name: test-results-ubuntu
 path: test-results/
 - name: Run ubuntu/chrome flow retry failures
 id: retry
 if: always()
 run: |
-ls -1 "test-results"
-if [[ $(ls -1 "test-results" | wc -l) == "0" ]];
-then echo "retried=false" >> $GITHUB_OUTPUT; exit 0;
-else echo "retried=true" >> $GITHUB_OUTPUT;
+if [[ -d "test-results" ]];
+then if [[ $(ls -1 "test-results" | wc -l) != "0" ]];
+then echo "retried=true" >> $GITHUB_OUTPUT;
+else echo "retried=false" >> $GITHUB_OUTPUT; exit 0;
+fi;
+else echo "retried=false" >> $GITHUB_OUTPUT; exit 0;
 fi;
 yarn playwright test --project="Google Chrome" --last-failed e2e/playwright/flow-tests.spec.ts
 env:
@@ -153,14 +158,14 @@ jobs:
 - uses: actions/upload-artifact@v4
 if: always()
 with:
-name: test-results
+name: test-results-ubuntu
 path: test-results/
 retention-days: 30
 overwrite: true
 - uses: actions/upload-artifact@v4
 if: always()
 with:
-name: playwright-report
+name: playwright-report-ubuntu
 path: playwright-report/
 retention-days: 30
 overwrite: true
@@ -227,23 +232,25 @@ jobs:
 if: ${{ always() }}
 continue-on-error: true
 with:
-name: test-results
+name: test-results-macos
 path: test-results/
 - name: Run macos/safari flow retry failures
 id: retry
 continue-on-error: true
-if: ${{ success() }}
+if: always()
 run: |
-if [ -d "test-results" ];
-then echo "retried=true" >> $GITHUB_OUTPUT;
-else echo "retried=false" >> $GITHUB_OUTPUT;
+if [[ -d "test-results" ]];
+then if [[ $(ls -1 "test-results" | wc -l) != "0" ]];
+then echo "retried=true" >> $GITHUB_OUTPUT;
+else echo "retried=false" >> $GITHUB_OUTPUT; exit 0;
+fi;
+else echo "retried=false" >> $GITHUB_OUTPUT; exit 0;
 fi;
 yarn playwright test --project="webkit" --last-failed e2e/playwright/flow-tests.spec.ts
 env:
 CI: true
 token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
 - name: Run macos/safari flow
-if: ${{ steps.retry.outputs.retried != 'true' }}
+if: steps.retry.outputs.retried == 'false'
 # webkit doesn't work on Ubuntu because of the same reason tauri doesn't (webRTC issues)
 # TODO remove this and the matrix and run all tests on ubuntu when this is fixed
 run: yarn playwright test --project="webkit" e2e/playwright/flow-tests.spec.ts
@@ -253,14 +260,14 @@ jobs:
 - uses: actions/upload-artifact@v4
 if: ${{ always() }}
 with:
-name: test-results
+name: test-results-macos
 path: test-results/
 retention-days: 30
 overwrite: true
 - uses: actions/upload-artifact@v4
 if: ${{ always() }}
 with:
-name: playwright-report
+name: playwright-report-macos
 path: playwright-report/
 retention-days: 30
 overwrite: true
@@ -17,11 +17,8 @@ angleToMatchLengthX(segment_name: TagIdentifier, to: number, sketch_group: Sketc
 ```js
 const sketch001 = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
-|> line([2, 5], %, 'seg01')
-|> angledLineToX([
--angleToMatchLengthX('seg01', 7, %),
-10
-], %)
+|> line([2, 5], %, $seg01)
+|> angledLineToX([-angleToMatchLengthX(seg01, 7, %), 10], %)
 |> close(%)

 const extrusion = extrude(5, sketch001)
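Every KCL example touched in this compare follows the pattern of the hunk above: a tag is declared with a `$` tag declarator at the call that creates it, and later references use the bare identifier rather than the quoted name. Assembled from the hunk, the removed form of this example reads:

```js
const sketch001 = startSketchOn('XZ')
  |> startProfileAt([0, 0], %)
  |> line([2, 5], %, 'seg01')
  |> angledLineToX([
       -angleToMatchLengthX('seg01', 7, %),
       10
     ], %)
  |> close(%)

const extrusion = extrude(5, sketch001)
```

and the added form reads:

```js
const sketch001 = startSketchOn('XZ')
  |> startProfileAt([0, 0], %)
  |> line([2, 5], %, $seg01)
  |> angledLineToX([-angleToMatchLengthX(seg01, 7, %), 10], %)
  |> close(%)

const extrusion = extrude(5, sketch001)
```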
@@ -17,9 +17,9 @@ angleToMatchLengthY(segment_name: TagIdentifier, to: number, sketch_group: Sketc
 ```js
 const sketch001 = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
-|> line([1, 2], %, 'seg01')
+|> line([1, 2], %, $seg01)
 |> angledLine({
-angle: angleToMatchLengthY('seg01', 15, %),
+angle: angleToMatchLengthY(seg01, 15, %),
 length: 5
 }, %)
 |> yLineTo(0, %)
@@ -17,10 +17,10 @@ angledLineOfXLength(data: AngledLineData, sketch_group: SketchGroup, tag?: TagDe
 ```js
 const sketch001 = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
-|> angledLineOfXLength({ angle: 45, length: 10 }, %, "edge1")
-|> angledLineOfXLength({ angle: -15, length: 20 }, %, "edge2")
+|> angledLineOfXLength({ angle: 45, length: 10 }, %, $edge1)
+|> angledLineOfXLength({ angle: -15, length: 20 }, %, $edge2)
 |> line([0, -5], %)
-|> close(%, "edge3")
+|> close(%, $edge3)

 const extrusion = extrude(10, sketch001)
 ```
@@ -18,7 +18,7 @@ angledLineThatIntersects(data: AngledLineThatIntersectsData, sketch_group: Sketc
 const exampleSketch = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
 |> lineTo([5, 10], %)
-|> lineTo([-10, 10], %, "lineToIntersect")
+|> lineTo([-10, 10], %, $lineToIntersect)
 |> lineTo([0, 20], %)
 |> angledLineThatIntersects({
 angle: 80,
@@ -22,19 +22,19 @@ const chamferLength = 2

 const mountingPlateSketch = startSketchOn("XY")
 |> startProfileAt([-width / 2, -length / 2], %)
-|> lineTo([width / 2, -length / 2], %, 'edge1')
-|> lineTo([width / 2, length / 2], %, 'edge2')
-|> lineTo([-width / 2, length / 2], %, 'edge3')
-|> close(%, 'edge4')
+|> lineTo([width / 2, -length / 2], %, $edge1)
+|> lineTo([width / 2, length / 2], %, $edge2)
+|> lineTo([-width / 2, length / 2], %, $edge3)
+|> close(%, $edge4)

 const mountingPlate = extrude(thickness, mountingPlateSketch)
 |> chamfer({
 length: chamferLength,
 tags: [
-getNextAdjacentEdge('edge1', %),
-getNextAdjacentEdge('edge2', %),
-getNextAdjacentEdge('edge3', %),
-getNextAdjacentEdge('edge4', %)
+getNextAdjacentEdge(edge1, %),
+getNextAdjacentEdge(edge2, %),
+getNextAdjacentEdge(edge3, %),
+getNextAdjacentEdge(edge4, %)
 ]
 }, %)
 ```
@@ -9,7 +9,7 @@ Sketch a circle.


 ```js
-circle(center: [number], radius: number, tag?: TagDeclarator, sketch_surface_or_group: SketchSurfaceOrGroup) -> SketchGroup
+circle(center: [number], radius: number, sketch_surface_or_group: SketchSurfaceOrGroup, tag?: TagDeclarator) -> SketchGroup
 ```

 ### Examples
@@ -41,14 +41,6 @@ const example = extrude(5, exampleSketch)

 * `center`: `[number]` (REQUIRED)
 * `radius`: `number` (REQUIRED)
-* `tag`: `TagDeclarator` (OPTIONAL)
-```js
-{
-end: number,
-start: number,
-value: string,
-}
-```
 * `sketch_surface_or_group`: `SketchSurfaceOrGroup` - A sketch surface or a sketch group. (REQUIRED)
 ```js
 {
@@ -510,6 +502,14 @@ const example = extrude(5, exampleSketch)
 }],
 }
 ```
+* `tag`: `TagDeclarator` (OPTIONAL)
+```js
+{
+end: number,
+start: number,
+value: string,
+}
+```

 ### Returns

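The signature hunk above moves the optional `tag` parameter to the end, after `sketch_surface_or_group`, matching the reordered parameter list further down. A minimal sketch of the call shape under that new signature: the untagged call is copied from the revolve example later in this diff, while `$circleTag` is a hypothetical name used only for illustration and does not appear in the change set.

```js
const sketch001 = startSketchOn('XZ')
  |> circle([10, 10], 4, %)

const sketch002 = startSketchOn('XZ')
  |> circle([10, 10], 4, %, $circleTag)
```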
@@ -22,19 +22,19 @@ const filletRadius = 2

 const mountingPlateSketch = startSketchOn("XY")
 |> startProfileAt([-width / 2, -length / 2], %)
-|> lineTo([width / 2, -length / 2], %, 'edge1')
-|> lineTo([width / 2, length / 2], %, 'edge2')
-|> lineTo([-width / 2, length / 2], %, 'edge3')
-|> close(%, 'edge4')
+|> lineTo([width / 2, -length / 2], %, $edge1)
+|> lineTo([width / 2, length / 2], %, $edge2)
+|> lineTo([-width / 2, length / 2], %, $edge3)
+|> close(%, $edge4)

 const mountingPlate = extrude(thickness, mountingPlateSketch)
 |> fillet({
 radius: filletRadius,
 tags: [
-getNextAdjacentEdge('edge1', %),
-getNextAdjacentEdge('edge2', %),
-getNextAdjacentEdge('edge3', %),
-getNextAdjacentEdge('edge4', %)
+getNextAdjacentEdge(edge1, %),
+getNextAdjacentEdge(edge2, %),
+getNextAdjacentEdge(edge3, %),
+getNextAdjacentEdge(edge4, %)
 ]
 }, %)
 ```
@@ -17,7 +17,7 @@ getEdge(tag: TagIdentifier, extrude_group: ExtrudeGroup) -> Uuid
 ```js
 const box = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
-|> line([0, 10], %, 'revolveAxis')
+|> line([0, 10], %, $revolveAxis)
 |> line([10, 0], %)
 |> line([0, -10], %)
 |> close(%)
@@ -30,7 +30,7 @@ const revolution = startSketchOn(box, "revolveAxis")
 |> line([0, -10], %)
 |> close(%)
 |> revolve({
-axis: getEdge('revolveAxis', box),
+axis: getEdge(revolveAxis, box),
 angle: 90
 }, %)
 ```
@@ -21,15 +21,13 @@ const exampleSketch = startSketchOn('XZ')
 |> angledLine({ angle: 60, length: 10 }, %)
 |> angledLine({ angle: 120, length: 10 }, %)
 |> line([-10, 0], %)
-|> angledLine({ angle: 240, length: 10 }, %, 'referenceEdge')
+|> angledLine({ angle: 240, length: 10 }, %, $referenceEdge)
 |> close(%)

 const example = extrude(5, exampleSketch)
 |> fillet({
 radius: 3,
-tags: [
-getNextAdjacentEdge("referenceEdge", %)
-]
+tags: [getNextAdjacentEdge(referenceEdge, %)]
 }, %)
 ```

@@ -21,13 +21,13 @@ const exampleSketch = startSketchOn('XZ')
 |> angledLine({ angle: 60, length: 10 }, %)
 |> angledLine({ angle: 120, length: 10 }, %)
 |> line([-10, 0], %)
-|> angledLine({ angle: 240, length: 10 }, %, 'referenceEdge')
+|> angledLine({ angle: 240, length: 10 }, %, $referenceEdge)
 |> close(%)

 const example = extrude(5, exampleSketch)
 |> fillet({
 radius: 3,
-tags: [getOppositeEdge("referenceEdge", %)]
+tags: [getOppositeEdge(referenceEdge, %)]
 }, %)
 ```

@@ -21,14 +21,14 @@ const exampleSketch = startSketchOn('XZ')
 |> angledLine({ angle: 60, length: 10 }, %)
 |> angledLine({ angle: 120, length: 10 }, %)
 |> line([-10, 0], %)
-|> angledLine({ angle: 240, length: 10 }, %, 'referenceEdge')
+|> angledLine({ angle: 240, length: 10 }, %, $referenceEdge)
 |> close(%)

 const example = extrude(5, exampleSketch)
 |> fillet({
 radius: 3,
 tags: [
-getPreviousAdjacentEdge("referenceEdge", %)
+getPreviousAdjacentEdge(referenceEdge, %)
 ]
 }, %)
 ```
@@ -17,9 +17,9 @@ profileStart(sketch_group: SketchGroup) -> [number]
 ```js
 const sketch001 = startSketchOn('XY')
 |> startProfileAt([5, 2], %)
-|> angledLine({ angle: 120, length: 50 }, %, 'seg01')
+|> angledLine({ angle: 120, length: 50 }, %, $seg01)
 |> angledLine({
-angle: segAng('seg01', %) + 120,
+angle: segAng(seg01, %) + 120,
 length: 50
 }, %)
 |> lineTo(profileStart(%), %)
@@ -99,7 +99,7 @@ const box = startSketchOn('XY')
 |> startProfileAt([0, 0], %)
 |> line([0, 20], %)
 |> line([20, 0], %)
-|> line([0, -20], %, 'revolveAxis')
+|> line([0, -20], %, $revolveAxis)
 |> close(%)
 |> extrude(20, %)

@@ -107,7 +107,7 @@ const sketch001 = startSketchOn(box, "END")
 |> circle([10, 10], 4, %)
 |> revolve({
 angle: 90,
-axis: getOppositeEdge('revolveAxis', box)
+axis: getOppositeEdge(revolveAxis, box)
 }, %)
 ```

@@ -18,11 +18,11 @@ segAng(segment_name: TagIdentifier, sketch_group: SketchGroup) -> number
 const exampleSketch = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
 |> line([10, 0], %)
-|> line([5, 10], %, 'seg01')
+|> line([5, 10], %, $seg01)
 |> line([-10, 0], %)
-|> angledLine([segAng('seg01', %), 10], %)
+|> angledLine([segAng(seg01, %), 10], %)
 |> line([-10, 0], %)
-|> angledLine([segAng('seg01', %), -15], %)
+|> angledLine([segAng(seg01, %), -15], %)
 |> close(%)

 const example = extrude(4, exampleSketch)
@@ -17,9 +17,9 @@ segEndX(segment_name: TagIdentifier, sketch_group: SketchGroup) -> number
 ```js
 const exampleSketch = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
-|> line([20, 0], %, "thing")
+|> line([20, 0], %, $thing)
 |> line([0, 5], %)
-|> line([segEndX("thing", %), 0], %)
+|> line([segEndX(thing, %), 0], %)
 |> line([-20, 10], %)
 |> close(%)

@@ -18,9 +18,9 @@ segEndY(segment_name: TagIdentifier, sketch_group: SketchGroup) -> number
 const exampleSketch = startSketchOn('XZ')
 |> startProfileAt([0, 0], %)
 |> line([20, 0], %)
-|> line([0, 3], %, "thing")
+|> line([0, 3], %, $thing)
 |> line([-10, 0], %)
-|> line([0, segEndY("thing", %)], %)
+|> line([0, segEndY(thing, %)], %)
 |> line([-10, 0], %)
 |> close(%)

@@ -17,12 +17,9 @@ segLen(segment_name: TagIdentifier, sketch_group: SketchGroup) -> number
 ```js
 const exampleSketch = startSketchOn("XZ")
 |> startProfileAt([0, 0], %)
-|> angledLine({ angle: 60, length: 10 }, %, "thing")
+|> angledLine({ angle: 60, length: 10 }, %, $thing)
 |> tangentialArc({ offset: -120, radius: 5 }, %)
-|> angledLine({
-angle: -60,
-length: segLen("thing", %)
-}, %)
+|> angledLine({ angle: -60, length: segLen(thing, %) }, %)
 |> close(%)

 const example = extrude(5, exampleSketch)
@@ -49,7 +49,7 @@ const example003 = extrude(5, exampleSketch003)
 const exampleSketch = startSketchOn("XY")
 |> startProfileAt([0, 0], %)
 |> line([10, 0], %)
-|> line([0, 10], %, 'sketchingFace')
+|> line([0, 10], %, $sketchingFace)
 |> line([-10, 0], %)
 |> close(%)

@@ -60,7 +60,7 @@ const exampleSketch002 = startSketchOn(example, 'sketchingFace')
 |> line([8, 0], %)
 |> line([0, 8], %)
 |> line([-8, 0], %)
-|> close(%, 'sketchingFace002')
+|> close(%, $sketchingFace002)

 const example002 = extrude(10, exampleSketch002)

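A detail worth noting in the two hunks above: the tag declarations move to the `$` form ($sketchingFace, $sketchingFace002), while startSketchOn still selects the face by its quoted tag name, as the unchanged 'sketchingFace' context line shows. A minimal sketch of that combination, assembled only from lines that appear in these examples:

```js
const exampleSketch = startSketchOn("XY")
  |> startProfileAt([0, 0], %)
  |> line([10, 0], %)
  |> line([0, 10], %, $sketchingFace)
  |> line([-10, 0], %)
  |> close(%)

const example = extrude(10, exampleSketch)

const exampleSketch002 = startSketchOn(example, 'sketchingFace')
  |> startProfileAt([1, 1], %)
  |> line([8, 0], %)
  |> line([0, 8], %)
  |> line([-8, 0], %)
  |> close(%, $sketchingFace002)

const example002 = extrude(10, exampleSketch002)
```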
@ -2127,7 +2127,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const sketch001 = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([2, 5], %, 'seg01')\n |> angledLineToX([\n -angleToMatchLengthX('seg01', 7, %),\n 10\n ], %)\n |> close(%)\n\nconst extrusion = extrude(5, sketch001)"
|
||||
"const sketch001 = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([2, 5], %, $seg01)\n |> angledLineToX([-angleToMatchLengthX(seg01, 7, %), 10], %)\n |> close(%)\n\nconst extrusion = extrude(5, sketch001)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -4192,7 +4192,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const sketch001 = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([1, 2], %, 'seg01')\n |> angledLine({\n angle: angleToMatchLengthY('seg01', 15, %),\n length: 5\n }, %)\n |> yLineTo(0, %)\n |> close(%)\n\nconst extrusion = extrude(5, sketch001)"
|
||||
"const sketch001 = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([1, 2], %, $seg01)\n |> angledLine({\n angle: angleToMatchLengthY(seg01, 15, %),\n length: 5\n }, %)\n |> yLineTo(0, %)\n |> close(%)\n\nconst extrusion = extrude(5, sketch001)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -12332,7 +12332,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const sketch001 = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> angledLineOfXLength({ angle: 45, length: 10 }, %, \"edge1\")\n |> angledLineOfXLength({ angle: -15, length: 20 }, %, \"edge2\")\n |> line([0, -5], %)\n |> close(%, \"edge3\")\n\nconst extrusion = extrude(10, sketch001)"
|
||||
"const sketch001 = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> angledLineOfXLength({ angle: 45, length: 10 }, %, $edge1)\n |> angledLineOfXLength({ angle: -15, length: 20 }, %, $edge2)\n |> line([0, -5], %)\n |> close(%, $edge3)\n\nconst extrusion = extrude(10, sketch001)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -20494,7 +20494,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> lineTo([5, 10], %)\n |> lineTo([-10, 10], %, \"lineToIntersect\")\n |> lineTo([0, 20], %)\n |> angledLineThatIntersects({\n angle: 80,\n intersectTag: 'lineToIntersect',\n offset: 10\n }, %)\n |> close(%)\n\nconst example = extrude(10, exampleSketch)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> lineTo([5, 10], %)\n |> lineTo([-10, 10], %, $lineToIntersect)\n |> lineTo([0, 20], %)\n |> angledLineThatIntersects({\n angle: 80,\n intersectTag: 'lineToIntersect',\n offset: 10\n }, %)\n |> close(%)\n\nconst example = extrude(10, exampleSketch)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -40165,7 +40165,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const width = 20\nconst length = 10\nconst thickness = 1\nconst chamferLength = 2\n\nconst mountingPlateSketch = startSketchOn(\"XY\")\n |> startProfileAt([-width / 2, -length / 2], %)\n |> lineTo([width / 2, -length / 2], %, 'edge1')\n |> lineTo([width / 2, length / 2], %, 'edge2')\n |> lineTo([-width / 2, length / 2], %, 'edge3')\n |> close(%, 'edge4')\n\nconst mountingPlate = extrude(thickness, mountingPlateSketch)\n |> chamfer({\n length: chamferLength,\n tags: [\n getNextAdjacentEdge('edge1', %),\n getNextAdjacentEdge('edge2', %),\n getNextAdjacentEdge('edge3', %),\n getNextAdjacentEdge('edge4', %)\n ]\n }, %)"
|
||||
"const width = 20\nconst length = 10\nconst thickness = 1\nconst chamferLength = 2\n\nconst mountingPlateSketch = startSketchOn(\"XY\")\n |> startProfileAt([-width / 2, -length / 2], %)\n |> lineTo([width / 2, -length / 2], %, $edge1)\n |> lineTo([width / 2, length / 2], %, $edge2)\n |> lineTo([-width / 2, length / 2], %, $edge3)\n |> close(%, $edge4)\n\nconst mountingPlate = extrude(thickness, mountingPlateSketch)\n |> chamfer({\n length: chamferLength,\n tags: [\n getNextAdjacentEdge(edge1, %),\n getNextAdjacentEdge(edge2, %),\n getNextAdjacentEdge(edge3, %),\n getNextAdjacentEdge(edge4, %)\n ]\n }, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -40197,35 +40197,6 @@
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"name": "tag",
|
||||
"type": "TagDeclarator",
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"end",
|
||||
"start",
|
||||
"value"
|
||||
],
|
||||
"properties": {
|
||||
"end": {
|
||||
"type": "integer",
|
||||
"format": "uint",
|
||||
"minimum": 0.0
|
||||
},
|
||||
"start": {
|
||||
"type": "integer",
|
||||
"format": "uint",
|
||||
"minimum": 0.0
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"nullable": true
|
||||
},
|
||||
"required": false
|
||||
},
|
||||
{
|
||||
"name": "sketch_surface_or_group",
|
||||
"type": "SketchSurfaceOrGroup",
|
||||
@ -42811,6 +42782,35 @@
|
||||
]
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"name": "tag",
|
||||
"type": "TagDeclarator",
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"end",
|
||||
"start",
|
||||
"value"
|
||||
],
|
||||
"properties": {
|
||||
"end": {
|
||||
"type": "integer",
|
||||
"format": "uint",
|
||||
"minimum": 0.0
|
||||
},
|
||||
"start": {
|
||||
"type": "integer",
|
||||
"format": "uint",
|
||||
"minimum": 0.0
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"nullable": true
|
||||
},
|
||||
"required": false
|
||||
}
|
||||
],
|
||||
"returnValue": {
|
||||
@ -58390,7 +58390,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const width = 20\nconst length = 10\nconst thickness = 1\nconst filletRadius = 2\n\nconst mountingPlateSketch = startSketchOn(\"XY\")\n |> startProfileAt([-width / 2, -length / 2], %)\n |> lineTo([width / 2, -length / 2], %, 'edge1')\n |> lineTo([width / 2, length / 2], %, 'edge2')\n |> lineTo([-width / 2, length / 2], %, 'edge3')\n |> close(%, 'edge4')\n\nconst mountingPlate = extrude(thickness, mountingPlateSketch)\n |> fillet({\n radius: filletRadius,\n tags: [\n getNextAdjacentEdge('edge1', %),\n getNextAdjacentEdge('edge2', %),\n getNextAdjacentEdge('edge3', %),\n getNextAdjacentEdge('edge4', %)\n ]\n }, %)"
|
||||
"const width = 20\nconst length = 10\nconst thickness = 1\nconst filletRadius = 2\n\nconst mountingPlateSketch = startSketchOn(\"XY\")\n |> startProfileAt([-width / 2, -length / 2], %)\n |> lineTo([width / 2, -length / 2], %, $edge1)\n |> lineTo([width / 2, length / 2], %, $edge2)\n |> lineTo([-width / 2, length / 2], %, $edge3)\n |> close(%, $edge4)\n\nconst mountingPlate = extrude(thickness, mountingPlateSketch)\n |> fillet({\n radius: filletRadius,\n tags: [\n getNextAdjacentEdge(edge1, %),\n getNextAdjacentEdge(edge2, %),\n getNextAdjacentEdge(edge3, %),\n getNextAdjacentEdge(edge4, %)\n ]\n }, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -60071,7 +60071,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const box = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([0, 10], %, 'revolveAxis')\n |> line([10, 0], %)\n |> line([0, -10], %)\n |> close(%)\n |> extrude(10, %)\n\nconst revolution = startSketchOn(box, \"revolveAxis\")\n |> startProfileAt([5, 10], %)\n |> line([0, 10], %)\n |> line([2, 0], %)\n |> line([0, -10], %)\n |> close(%)\n |> revolve({\n axis: getEdge('revolveAxis', box),\n angle: 90\n }, %)"
|
||||
"const box = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([0, 10], %, $revolveAxis)\n |> line([10, 0], %)\n |> line([0, -10], %)\n |> close(%)\n |> extrude(10, %)\n\nconst revolution = startSketchOn(box, \"revolveAxis\")\n |> startProfileAt([5, 10], %)\n |> line([0, 10], %)\n |> line([2, 0], %)\n |> line([0, -10], %)\n |> close(%)\n |> revolve({\n axis: getEdge(revolveAxis, box),\n angle: 90\n }, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -61719,7 +61719,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %)\n |> angledLine({ angle: 120, length: 10 }, %)\n |> line([-10, 0], %)\n |> angledLine({ angle: 240, length: 10 }, %, 'referenceEdge')\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n |> fillet({\n radius: 3,\n tags: [\n getNextAdjacentEdge(\"referenceEdge\", %)\n ]\n }, %)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %)\n |> angledLine({ angle: 120, length: 10 }, %)\n |> line([-10, 0], %)\n |> angledLine({ angle: 240, length: 10 }, %, $referenceEdge)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n |> fillet({\n radius: 3,\n tags: [getNextAdjacentEdge(referenceEdge, %)]\n }, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -63367,7 +63367,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %)\n |> angledLine({ angle: 120, length: 10 }, %)\n |> line([-10, 0], %)\n |> angledLine({ angle: 240, length: 10 }, %, 'referenceEdge')\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n |> fillet({\n radius: 3,\n tags: [getOppositeEdge(\"referenceEdge\", %)]\n }, %)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %)\n |> angledLine({ angle: 120, length: 10 }, %)\n |> line([-10, 0], %)\n |> angledLine({ angle: 240, length: 10 }, %, $referenceEdge)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n |> fillet({\n radius: 3,\n tags: [getOppositeEdge(referenceEdge, %)]\n }, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -65015,7 +65015,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %)\n |> angledLine({ angle: 120, length: 10 }, %)\n |> line([-10, 0], %)\n |> angledLine({ angle: 240, length: 10 }, %, 'referenceEdge')\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n |> fillet({\n radius: 3,\n tags: [\n getPreviousAdjacentEdge(\"referenceEdge\", %)\n ]\n }, %)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %)\n |> angledLine({ angle: 120, length: 10 }, %)\n |> line([-10, 0], %)\n |> angledLine({ angle: 240, length: 10 }, %, $referenceEdge)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n |> fillet({\n radius: 3,\n tags: [\n getPreviousAdjacentEdge(referenceEdge, %)\n ]\n }, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -108585,7 +108585,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const sketch001 = startSketchOn('XY')\n |> startProfileAt([5, 2], %)\n |> angledLine({ angle: 120, length: 50 }, %, 'seg01')\n |> angledLine({\n angle: segAng('seg01', %) + 120,\n length: 50\n }, %)\n |> lineTo(profileStart(%), %)\n |> close(%)\n |> extrude(20, %)"
|
||||
"const sketch001 = startSketchOn('XY')\n |> startProfileAt([5, 2], %)\n |> angledLine({ angle: 120, length: 50 }, %, $seg01)\n |> angledLine({\n angle: segAng(seg01, %) + 120,\n length: 50\n }, %)\n |> lineTo(profileStart(%), %)\n |> close(%)\n |> extrude(20, %)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -116372,7 +116372,7 @@
|
||||
"const part001 = startSketchOn('XY')\n |> startProfileAt([4, 12], %)\n |> line([2, 0], %)\n |> line([0, -6], %)\n |> line([4, -6], %)\n |> line([0, -6], %)\n |> line([-3.75, -4.5], %)\n |> line([0, -5.5], %)\n |> line([-2, 0], %)\n |> close(%)\n |> revolve({ axis: 'y', angle: 180 }, %)",
|
||||
"const part001 = startSketchOn('XY')\n |> startProfileAt([4, 12], %)\n |> line([2, 0], %)\n |> line([0, -6], %)\n |> line([4, -6], %)\n |> line([0, -6], %)\n |> line([-3.75, -4.5], %)\n |> line([0, -5.5], %)\n |> line([-2, 0], %)\n |> close(%)\n |> revolve({ axis: 'y', angle: 180 }, %)\nconst part002 = startSketchOn(part001, 'end')\n |> startProfileAt([4.5, -5], %)\n |> line([0, 5], %)\n |> line([5, 0], %)\n |> line([0, -5], %)\n |> close(%)\n |> extrude(5, %)",
|
||||
"const box = startSketchOn('XY')\n |> startProfileAt([0, 0], %)\n |> line([0, 20], %)\n |> line([20, 0], %)\n |> line([0, -20], %)\n |> close(%)\n |> extrude(20, %)\n\nconst sketch001 = startSketchOn(box, \"END\")\n |> circle([10, 10], 4, %)\n |> revolve({ angle: -90, axis: 'y' }, %)",
|
||||
"const box = startSketchOn('XY')\n |> startProfileAt([0, 0], %)\n |> line([0, 20], %)\n |> line([20, 0], %)\n |> line([0, -20], %, 'revolveAxis')\n |> close(%)\n |> extrude(20, %)\n\nconst sketch001 = startSketchOn(box, \"END\")\n |> circle([10, 10], 4, %)\n |> revolve({\n angle: 90,\n axis: getOppositeEdge('revolveAxis', box)\n }, %)",
|
||||
"const box = startSketchOn('XY')\n |> startProfileAt([0, 0], %)\n |> line([0, 20], %)\n |> line([20, 0], %)\n |> line([0, -20], %, $revolveAxis)\n |> close(%)\n |> extrude(20, %)\n\nconst sketch001 = startSketchOn(box, \"END\")\n |> circle([10, 10], 4, %)\n |> revolve({\n angle: 90,\n axis: getOppositeEdge(revolveAxis, box)\n }, %)",
|
||||
"const sketch001 = startSketchOn('XY')\n |> startProfileAt([10, 0], %)\n |> line([5, -5], %)\n |> line([5, 5], %)\n |> lineTo([profileStartX(%), profileStartY(%)], %)\n |> close(%)\n\nconst part001 = revolve({\n axis: {\n custom: {\n axis: [0.0, 1.0, 0.0],\n origin: [0.0, 0.0, 0.0]\n }\n }\n}, sketch001)"
|
||||
]
|
||||
},
|
||||
@ -118429,7 +118429,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> line([5, 10], %, 'seg01')\n |> line([-10, 0], %)\n |> angledLine([segAng('seg01', %), 10], %)\n |> line([-10, 0], %)\n |> angledLine([segAng('seg01', %), -15], %)\n |> close(%)\n\nconst example = extrude(4, exampleSketch)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> line([5, 10], %, $seg01)\n |> line([-10, 0], %)\n |> angledLine([segAng(seg01, %), 10], %)\n |> line([-10, 0], %)\n |> angledLine([segAng(seg01, %), -15], %)\n |> close(%)\n\nconst example = extrude(4, exampleSketch)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -120485,7 +120485,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([20, 0], %, \"thing\")\n |> line([0, 5], %)\n |> line([segEndX(\"thing\", %), 0], %)\n |> line([-20, 10], %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([20, 0], %, $thing)\n |> line([0, 5], %)\n |> line([segEndX(thing, %), 0], %)\n |> line([-20, 10], %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -122541,7 +122541,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([20, 0], %)\n |> line([0, 3], %, \"thing\")\n |> line([-10, 0], %)\n |> line([0, segEndY(\"thing\", %)], %)\n |> line([-10, 0], %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)"
|
||||
"const exampleSketch = startSketchOn('XZ')\n |> startProfileAt([0, 0], %)\n |> line([20, 0], %)\n |> line([0, 3], %, $thing)\n |> line([-10, 0], %)\n |> line([0, segEndY(thing, %)], %)\n |> line([-10, 0], %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -124597,7 +124597,7 @@
|
||||
"unpublished": false,
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn(\"XZ\")\n |> startProfileAt([0, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %, \"thing\")\n |> tangentialArc({ offset: -120, radius: 5 }, %)\n |> angledLine({\n angle: -60,\n length: segLen(\"thing\", %)\n }, %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)"
|
||||
"const exampleSketch = startSketchOn(\"XZ\")\n |> startProfileAt([0, 0], %)\n |> angledLine({ angle: 60, length: 10 }, %, $thing)\n |> tangentialArc({ offset: -120, radius: 5 }, %)\n |> angledLine({ angle: -60, length: segLen(thing, %) }, %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@ -136709,7 +136709,7 @@
|
||||
"deprecated": false,
|
||||
"examples": [
|
||||
"const exampleSketch = startSketchOn(\"XY\")\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> line([0, 10], %)\n |> line([-10, 0], %)\n |> close(%)\n\nconst example = extrude(5, exampleSketch)\n\nconst exampleSketch002 = startSketchOn(example, 'end')\n |> startProfileAt([1, 1], %)\n |> line([8, 0], %)\n |> line([0, 8], %)\n |> line([-8, 0], %)\n |> close(%)\n\nconst example002 = extrude(5, exampleSketch002)\n\nconst exampleSketch003 = startSketchOn(example002, 'end')\n |> startProfileAt([2, 2], %)\n |> line([6, 0], %)\n |> line([0, 6], %)\n |> line([-6, 0], %)\n |> close(%)\n\nconst example003 = extrude(5, exampleSketch003)",
|
||||
"const exampleSketch = startSketchOn(\"XY\")\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> line([0, 10], %, 'sketchingFace')\n |> line([-10, 0], %)\n |> close(%)\n\nconst example = extrude(10, exampleSketch)\n\nconst exampleSketch002 = startSketchOn(example, 'sketchingFace')\n |> startProfileAt([1, 1], %)\n |> line([8, 0], %)\n |> line([0, 8], %)\n |> line([-8, 0], %)\n |> close(%, 'sketchingFace002')\n\nconst example002 = extrude(10, exampleSketch002)\n\nconst exampleSketch003 = startSketchOn(example002, 'sketchingFace002')\n |> startProfileAt([-8, 12], %)\n |> line([0, 6], %)\n |> line([6, 0], %)\n |> line([0, -6], %)\n |> close(%)\n\nconst example003 = extrude(5, exampleSketch003)",
|
||||
"const exampleSketch = startSketchOn(\"XY\")\n |> startProfileAt([0, 0], %)\n |> line([10, 0], %)\n |> line([0, 10], %, $sketchingFace)\n |> line([-10, 0], %)\n |> close(%)\n\nconst example = extrude(10, exampleSketch)\n\nconst exampleSketch002 = startSketchOn(example, 'sketchingFace')\n |> startProfileAt([1, 1], %)\n |> line([8, 0], %)\n |> line([0, 8], %)\n |> line([-8, 0], %)\n |> close(%, $sketchingFace002)\n\nconst example002 = extrude(10, exampleSketch002)\n\nconst exampleSketch003 = startSketchOn(example002, 'sketchingFace002')\n |> startProfileAt([-8, 12], %)\n |> line([0, 6], %)\n |> line([6, 0], %)\n |> line([0, -6], %)\n |> close(%)\n\nconst example003 = extrude(5, exampleSketch003)",
|
||||
"const exampleSketch = startSketchOn('XY')\n |> startProfileAt([4, 12], %)\n |> line([2, 0], %)\n |> line([0, -6], %)\n |> line([4, -6], %)\n |> line([0, -6], %)\n |> line([-3.75, -4.5], %)\n |> line([0, -5.5], %)\n |> line([-2, 0], %)\n |> close(%)\n\nconst example = revolve({ axis: 'y', angle: 180 }, exampleSketch)\n\nconst exampleSketch002 = startSketchOn(example, 'end')\n |> startProfileAt([4.5, -5], %)\n |> line([0, 5], %)\n |> line([5, 0], %)\n |> line([0, -5], %)\n |> close(%)\n\nconst example002 = extrude(5, exampleSketch002)",
|
||||
"const a1 = startSketchOn({\n plane: {\n origin: { x: 0, y: 0, z: 0 },\n x_axis: { x: 1, y: 0, z: 0 },\n y_axis: { x: 0, y: 1, z: 0 },\n z_axis: { x: 0, y: 0, z: 1 }\n }\n })\n |> startProfileAt([0, 0], %)\n |> line([100.0, 0], %)\n |> yLine(-100.0, %)\n |> xLine(-100.0, %)\n |> yLine(100.0, %)\n |> close(%)\n |> extrude(3.14, %)"
|
||||
]
|
||||
|
@ -191,9 +191,9 @@ async function doBasicSketch(page: Page, openPanes: string[]) {
|
||||
await u.openKclCodePanel()
|
||||
await expect(u.codeLocator).toHaveText(`const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt(${commonPoints.startAt}, %)
|
||||
|> line([${commonPoints.num1}, 0], %, 'seg01')
|
||||
|> line([${commonPoints.num1}, 0], %, $seg01)
|
||||
|> line([0, ${commonPoints.num1 + 0.01}], %)
|
||||
|> angledLine([180, segLen('seg01', %)], %)`)
|
||||
|> angledLine([180, segLen(seg01, %)], %)`)
|
||||
}
|
||||
|
||||
test.describe('Basic sketch', () => {
|
||||
@ -786,7 +786,7 @@ test('if you write invalid kcl you get inlined errors', async ({ page }) => {
|
||||
|
||||
// error text on hover
|
||||
await page.hover('.cm-lint-marker-error')
|
||||
await expect(page.getByText('syntax: Unexpected token')).toBeVisible()
|
||||
await expect(page.getByText('Unexpected token')).toBeVisible()
|
||||
|
||||
// select the line that's causing the error and delete it
|
||||
await page.getByText('$ error').click()
|
||||
@ -1646,8 +1646,10 @@ test.describe('Onboarding tests', () => {
|
||||
await page.waitForURL('**/file/**', { waitUntil: 'domcontentloaded' })
|
||||
|
||||
// Test that the text in this step is correct
|
||||
const avatarLocator = page.getByTestId('user-sidebar-toggle').locator('img')
|
||||
const onboardingOverlayLocator = page
|
||||
const avatarLocator = await page
|
||||
.getByTestId('user-sidebar-toggle')
|
||||
.locator('img')
|
||||
const onboardingOverlayLocator = await page
|
||||
.getByTestId('onboarding-content')
|
||||
.locator('div')
|
||||
.nth(1)
|
||||
@ -1657,6 +1659,18 @@ test.describe('Onboarding tests', () => {
|
||||
await expect(onboardingOverlayLocator).toBeVisible()
|
||||
await expect(onboardingOverlayLocator).toContainText('your avatar')
|
||||
|
||||
// This is to force the avatar to 404.
|
||||
// For our test image (only triggers locally. on CI, it's Kurt's /
|
||||
// gravatar image )
|
||||
await page.route('/cat.jpg', async (route) => {
|
||||
await route.fulfill({
|
||||
status: 404,
|
||||
contentType: 'text/plain',
|
||||
body: 'Not Found!',
|
||||
})
|
||||
})
|
||||
|
||||
// 404 the CI avatar image
|
||||
await page.route('https://lh3.googleusercontent.com/**', async (route) => {
|
||||
await route.fulfill({
|
||||
status: 404,
|
||||
@ -1891,7 +1905,7 @@ test.describe('Testing selections', () => {
|
||||
|> angledLine({ angle: 3 + 0, length: 3.14 + 0 }, %)
|
||||
|> lineTo([20.14 + 0, -0.14 + 0], %)
|
||||
|> xLineTo(29 + 0, %)
|
||||
|> yLine(-3.14 + 0, %, 'a')
|
||||
|> yLine(-3.14 + 0, %, $a)
|
||||
|> xLine(1.63, %)
|
||||
|> angledLineOfXLength({ angle: 3 + 0, length: 3.14 }, %)
|
||||
|> angledLineOfYLength({ angle: 30, length: 3 + 0 }, %)
|
||||
@ -1899,7 +1913,7 @@ test.describe('Testing selections', () => {
|
||||
|> angledLineToY({ angle: 30, to: 11.14 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 3.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 0
|
||||
}, %)
|
||||
|> tangentialArcTo([13.14 + 0, 13.14], %)
|
||||
@ -1983,7 +1997,7 @@ test.describe('Testing selections', () => {
|
||||
|> line([2.48, 2.44], %)
|
||||
|> line([2.66, 1.17], %)
|
||||
|> line([3.75, 0.46], %)
|
||||
|> line([4.99, -0.46], %, 'seg01')
|
||||
|> line([4.99, -0.46], %, $seg01)
|
||||
|> line([3.3, -2.12], %)
|
||||
|> line([2.16, -3.33], %)
|
||||
|> line([0.85, -3.08], %)
|
||||
@ -2005,7 +2019,7 @@ const extrude001 = extrude(10, sketch001)
|
||||
await u.closeDebugPanel()
|
||||
|
||||
const selectUnExtrudable = () =>
|
||||
page.getByText(`line([4.99, -0.46], %, 'seg01')`).click()
|
||||
page.getByText(`line([4.99, -0.46], %, $seg01)`).click()
|
||||
const clickEmpty = () => page.mouse.click(700, 460)
|
||||
await selectUnExtrudable()
|
||||
// expect extrude button to be disabled
|
||||
@ -2019,7 +2033,7 @@ const extrude001 = extrude(10, sketch001)
|
||||
await expect(page.getByRole('button', { name: 'Extrude' })).toBeDisabled()
|
||||
|
||||
const codeToAdd = `${await u.codeLocator.allInnerTexts()}
|
||||
const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
const sketch002 = startSketchOn(extrude001, $seg01)
|
||||
|> startProfileAt([-12.94, 6.6], %)
|
||||
|> line([2.45, -0.2], %)
|
||||
|> line([-2, -1.25], %)
|
||||
@ -2046,11 +2060,11 @@ const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
const cases = [
|
||||
{
|
||||
pos: [694, 185],
|
||||
expectedCode: "line([74.36, 130.4], %, 'seg01')",
|
||||
expectedCode: 'line([74.36, 130.4], %, $seg01)',
|
||||
},
|
||||
{
|
||||
pos: [816, 244],
|
||||
expectedCode: "angledLine([segAng('seg01', %), yo], %)",
|
||||
expectedCode: 'angledLine([segAng(seg01, %), yo], %)',
|
||||
},
|
||||
{
|
||||
pos: [1107, 161],
|
||||
@ -2912,7 +2926,7 @@ test('Can edit a sketch that has been extruded in the same pipe', async ({
|
||||
|
||||
const startPX = [665, 458]
|
||||
|
||||
const dragPX = 80
|
||||
const dragPX = 40
|
||||
|
||||
await page.getByText('startProfileAt([4.61, -14.01], %)').click()
|
||||
await expect(page.getByRole('button', { name: 'Edit Sketch' })).toBeVisible()
|
||||
@ -2925,7 +2939,7 @@ test('Can edit a sketch that has been extruded in the same pipe', async ({
|
||||
// drag startProfieAt handle
|
||||
await page.dragAndDrop('#stream', '#stream', {
|
||||
sourcePosition: { x: startPX[0], y: startPX[1] },
|
||||
targetPosition: { x: startPX[0] + dragPX, y: startPX[1] - dragPX },
|
||||
targetPosition: { x: startPX[0] + dragPX, y: startPX[1] + dragPX },
|
||||
})
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(prevContent)
|
||||
@ -2938,7 +2952,7 @@ test('Can edit a sketch that has been extruded in the same pipe', async ({
|
||||
await page.waitForTimeout(100)
|
||||
await page.dragAndDrop('#stream', '#stream', {
|
||||
sourcePosition: { x: lineEnd.x - 5, y: lineEnd.y },
|
||||
targetPosition: { x: lineEnd.x + dragPX, y: lineEnd.y - dragPX },
|
||||
targetPosition: { x: lineEnd.x + dragPX, y: lineEnd.y + dragPX },
|
||||
})
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(prevContent)
|
||||
prevContent = await page.locator('.cm-content').innerText()
|
||||
@ -2949,7 +2963,7 @@ test('Can edit a sketch that has been extruded in the same pipe', async ({
|
||||
sourcePosition: { x: tangentEnd.x, y: tangentEnd.y - 5 },
|
||||
targetPosition: {
|
||||
x: tangentEnd.x + dragPX,
|
||||
y: tangentEnd.y - dragPX,
|
||||
y: tangentEnd.y + dragPX,
|
||||
},
|
||||
})
|
||||
await page.waitForTimeout(100)
|
||||
@ -2958,10 +2972,10 @@ test('Can edit a sketch that has been extruded in the same pipe', async ({
|
||||
// expect the code to have changed
|
||||
await expect(page.locator('.cm-content'))
|
||||
.toHaveText(`const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([6.44, -12.07], %)
|
||||
|> line([14.72, 2.01], %)
|
||||
|> startProfileAt([7.12, -16.82], %)
|
||||
|> line([15.4, -2.74], %)
|
||||
|> tangentialArcTo([24.95, -5.38], %)
|
||||
|> line([1.97, 2.06], %)
|
||||
|> line([2.65, -2.69], %)
|
||||
|> close(%)
|
||||
|> extrude(5, %)`)
|
||||
})
|
||||
@ -3098,8 +3112,11 @@ const doSnapAtDifferentScales = async (
|
||||
await u.updateCamPosition(camPos)
|
||||
await u.closeDebugPanel()
|
||||
|
||||
await page.mouse.move(0, 0)
|
||||
|
||||
// select a plane
|
||||
await page.mouse.click(700, 200)
|
||||
await page.mouse.move(700, 200, { steps: 10 })
|
||||
await page.mouse.click(700, 200, { delay: 200 })
|
||||
await expect(page.locator('.cm-content')).toHaveText(
|
||||
`const sketch001 = startSketchOn('-XZ')`
|
||||
)
|
||||
@ -3112,26 +3129,29 @@ const doSnapAtDifferentScales = async (
|
||||
|
||||
// draw three lines
|
||||
await page.waitForTimeout(500)
|
||||
await page.mouse.click(pointA[0], pointA[1])
|
||||
await page.mouse.move(pointA[0], pointA[1], { steps: 10 })
|
||||
await page.mouse.click(pointA[0], pointA[1], { delay: 200 })
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(prevContent)
|
||||
prevContent = await page.locator('.cm-content').innerText()
|
||||
|
||||
await page.mouse.click(pointB[0], pointB[1])
|
||||
await page.mouse.move(pointB[0], pointB[1], { steps: 10 })
|
||||
await page.mouse.click(pointB[0], pointB[1], { delay: 200 })
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(prevContent)
|
||||
prevContent = await page.locator('.cm-content').innerText()
|
||||
|
||||
await page.mouse.click(pointC[0], pointC[1])
|
||||
await page.mouse.move(pointC[0], pointC[1], { steps: 10 })
|
||||
await page.mouse.click(pointC[0], pointC[1], { delay: 200 })
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(prevContent)
|
||||
prevContent = await page.locator('.cm-content').innerText()
|
||||
|
||||
await page.mouse.move(pointA[0] - 12, pointA[1] + 12)
|
||||
await page.mouse.move(pointA[0] - 12, pointA[1] + 12, { steps: 10 })
|
||||
const pointNotQuiteA = [pointA[0] - 7, pointA[1] + 7]
|
||||
await page.mouse.move(pointNotQuiteA[0], pointNotQuiteA[1], { steps: 10 })
|
||||
|
||||
await page.mouse.click(pointNotQuiteA[0], pointNotQuiteA[1])
|
||||
await page.mouse.click(pointNotQuiteA[0], pointNotQuiteA[1], { delay: 200 })
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(prevContent)
|
||||
prevContent = await page.locator('.cm-content').innerText()
|
||||
|
||||
@ -3206,6 +3226,7 @@ test('Sketch on face', async ({ page }) => {
|
||||
true
|
||||
)
|
||||
await page.waitForTimeout(150)
|
||||
await u.closeDebugPanel()
|
||||
|
||||
const firstClickPosition = [612, 238]
|
||||
const secondClickPosition = [661, 242]
|
||||
@ -3231,7 +3252,7 @@ test('Sketch on face', async ({ page }) => {
|
||||
previousCodeContent = await page.locator('.cm-content').innerText()
|
||||
|
||||
await expect(page.locator('.cm-content'))
|
||||
.toContainText(`const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
.toContainText(`const sketch002 = startSketchOn(extrude001, seg01)
|
||||
|> startProfileAt([-12.94, 6.6], %)
|
||||
|> line([2.45, -0.2], %)
|
||||
|> line([-2.6, -1.25], %)
|
||||
@ -3267,7 +3288,7 @@ test('Sketch on face', async ({ page }) => {
|
||||
await expect(page.locator('.cm-content')).not.toHaveText(previousCodeContent)
|
||||
previousCodeContent = await page.locator('.cm-content').innerText()
|
||||
|
||||
const result = makeTemplate`const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
const result = makeTemplate`const sketch002 = startSketchOn(extrude001, seg01)
|
||||
|> startProfileAt([-12.83, 6.7], %)
|
||||
|> line([${[2.28, 2.35]}, -${0.07}], %)
|
||||
|> line([-3.05, -1.47], %)
|
||||
@ -3406,15 +3427,15 @@ test.describe('Testing constraints', () => {
|
||||
`const yo = 79
|
||||
const part001 = startSketchOn('XZ')
|
||||
|> startProfileAt([-7.54, -26.74], %)
|
||||
|> line([74.36, 130.4], %, 'seg01')
|
||||
|> line([74.36, 130.4], %, $seg01)
|
||||
|> line([78.92, -120.11], %)
|
||||
|> angledLine([segAng('seg01', %), yo], %)
|
||||
|> angledLine([segAng(seg01, %), yo], %)
|
||||
|> line([41.19, 28.97 + 5], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -3423,7 +3444,7 @@ const part002 = startSketchOn('XZ')
|
||||
await page.goto('/')
|
||||
await u.waitForAuthSkipAppStart()
|
||||
|
||||
await page.getByText("line([74.36, 130.4], %, 'seg01')").click()
|
||||
await page.getByText('line([74.36, 130.4], %, $seg01)').click()
|
||||
await page.getByRole('button', { name: 'Edit Sketch' }).click()
|
||||
|
||||
const line3 = await u.getSegmentBodyCoords(`[data-overlay-index="${2}"]`)
|
||||
@ -3466,15 +3487,15 @@ const part002 = startSketchOn('XZ')
|
||||
`const yo = 5
|
||||
const part001 = startSketchOn('XZ')
|
||||
|> startProfileAt([-7.54, -26.74], %)
|
||||
|> line([74.36, 130.4], %, 'seg01')
|
||||
|> line([74.36, 130.4], %, $seg01)
|
||||
|> line([78.92, -120.11], %)
|
||||
|> angledLine([segAng('seg01', %), 78.33], %)
|
||||
|> angledLine([segAng(seg01, %), 78.33], %)
|
||||
|> line([41.19, 28.97], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -3483,7 +3504,7 @@ const part002 = startSketchOn('XZ')
|
||||
await page.goto('/')
|
||||
await u.waitForAuthSkipAppStart()
|
||||
|
||||
await page.getByText("line([74.36, 130.4], %, 'seg01')").click()
|
||||
await page.getByText('line([74.36, 130.4], %, $seg01)').click()
|
||||
await page.getByRole('button', { name: 'Edit Sketch' }).click()
|
||||
|
||||
const [line1, line3] = await Promise.all([
|
||||
@ -3525,7 +3546,7 @@ const part002 = startSketchOn('XZ')
|
||||
|
||||
const activeLinesContent = await page.locator('.cm-activeLine').all()
|
||||
await expect(activeLinesContent[0]).toHaveText(
|
||||
`|> line([74.36, 130.4], %, 'seg01')`
|
||||
`|> line([74.36, 130.4], %, $seg01)`
|
||||
)
|
||||
await expect(activeLinesContent[1]).toHaveText(`}, %)`)
|
||||
|
||||
@ -3539,22 +3560,22 @@ const part002 = startSketchOn('XZ')
|
||||
{
|
||||
testName: 'Add variable',
|
||||
constraint: 'horizontal distance',
|
||||
value: "segEndX('seg01', %) + xDis001, 61.34",
|
||||
value: 'segEndX(seg01, %) + xDis001, 61.34',
|
||||
},
|
||||
{
|
||||
testName: 'No variable',
|
||||
constraint: 'horizontal distance',
|
||||
value: "segEndX('seg01', %) + 88.08, 61.34",
|
||||
value: 'segEndX(seg01, %) + 88.08, 61.34',
|
||||
},
|
||||
{
|
||||
testName: 'Add variable',
|
||||
constraint: 'vertical distance',
|
||||
value: "154.9, segEndY('seg01', %) - yDis001",
|
||||
value: '154.9, segEndY(seg01, %) - yDis001',
|
||||
},
|
||||
{
|
||||
testName: 'No variable',
|
||||
constraint: 'vertical distance',
|
||||
value: "154.9, segEndY('seg01', %) - 42.32",
|
||||
value: '154.9, segEndY(seg01, %) - 42.32',
|
||||
},
|
||||
] as const
|
||||
for (const { testName, value, constraint } of cases) {
|
||||
@ -3571,9 +3592,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([41.19, 28.97], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -3618,7 +3639,7 @@ const part002 = startSketchOn('XZ')
|
||||
|
||||
// checking activeLines assures the cursors are where they should be
|
||||
const codeAfter = [
|
||||
`|> line([74.36, 130.4], %, 'seg01')`,
|
||||
`|> line([74.36, 130.4], %, $seg01)`,
|
||||
`|> lineTo([${value}], %)`,
|
||||
]
|
||||
|
||||
@ -3679,9 +3700,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([41.19, 28.97], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -3751,13 +3772,13 @@ const part002 = startSketchOn('XZ')
|
||||
testName: 'Add variable',
|
||||
addVariable: true,
|
||||
axisSelect: false,
|
||||
value: "segAng('seg01', %) + angle001",
|
||||
value: 'segAng(seg01, %) + angle001',
|
||||
},
|
||||
{
|
||||
testName: 'No variable',
|
||||
addVariable: false,
|
||||
axisSelect: false,
|
||||
value: "segAng('seg01', %) + 22.69",
|
||||
value: 'segAng(seg01, %) + 22.69',
|
||||
},
|
||||
{
|
||||
testName: 'Add variable, selecting axis',
|
||||
@ -3786,9 +3807,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([41.19, 28.97], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -3834,7 +3855,7 @@ const part002 = startSketchOn('XZ')
|
||||
|
||||
// checking activeLines assures the cursors are where they should be
|
||||
const codeAfter = [
|
||||
"|> line([74.36, 130.4], %, 'seg01')",
|
||||
'|> line([74.36, 130.4], %, $seg01)',
|
||||
`|> angledLine([${value}, 78.33], %)`,
|
||||
]
|
||||
if (axisSelect) codeAfter.shift()
|
||||
@ -3896,9 +3917,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([41.19, 28.97], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -3972,9 +3993,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([41.19, 28.97], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -4039,19 +4060,19 @@ const part002 = startSketchOn('XZ')
|
||||
test.describe('Two segment - no modal constraints', () => {
|
||||
const cases = [
|
||||
{
|
||||
codeAfter: `|> angledLine([83, segLen('seg01', %)], %)`,
|
||||
codeAfter: `|> angledLine([83, segLen(seg01, %)], %)`,
|
||||
constraintName: 'Equal Length',
|
||||
},
|
||||
{
|
||||
codeAfter: `|> angledLine([segAng('seg01', %), 78.33], %)`,
|
||||
codeAfter: `|> angledLine([segAng(seg01, %), 78.33], %)`,
|
||||
constraintName: 'Parallel',
|
||||
},
|
||||
{
|
||||
codeAfter: `|> lineTo([segEndX('seg01', %), 61.34], %)`,
|
||||
codeAfter: `|> lineTo([segEndX(seg01, %), 61.34], %)`,
|
||||
constraintName: 'Vertically Align',
|
||||
},
|
||||
{
|
||||
codeAfter: `|> lineTo([154.9, segEndY('seg01', %)], %)`,
|
||||
codeAfter: `|> lineTo([154.9, segEndY(seg01, %)], %)`,
|
||||
constraintName: 'Horizontally Align',
|
||||
},
|
||||
] as const
|
||||
@ -4068,9 +4089,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([9.16, 77.79], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -4113,7 +4134,7 @@ const part002 = startSketchOn('XZ')
|
||||
|
||||
// check both cursors are where they should be after constraint is applied
|
||||
await expect(activeLinesContent[0]).toHaveText(
|
||||
"|> line([74.36, 130.4], %, 'seg01')"
|
||||
'|> line([74.36, 130.4], %, $seg01)'
|
||||
)
|
||||
await expect(activeLinesContent[1]).toHaveText(codeAfter)
|
||||
})
|
||||
@ -4145,9 +4166,9 @@ const part001 = startSketchOn('XZ')
|
||||
|> line([9.16, 77.79], %)
|
||||
const part002 = startSketchOn('XZ')
|
||||
|> startProfileAt([299.05, 231.45], %)
|
||||
|> xLine(-425.34, %, 'seg-what')
|
||||
|> xLine(-425.34, %, $seg_what)
|
||||
|> yLine(-264.06, %)
|
||||
|> xLine(segLen('seg-what', %), %)
|
||||
|> xLine(segLen(seg_what, %), %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)`
|
||||
)
|
||||
})
|
||||
@ -4194,7 +4215,7 @@ const part002 = startSketchOn('XZ')
|
||||
'persistCode',
|
||||
`const sketch001 = startSketchOn('XY')
|
||||
|> startProfileAt([-1.05, -1.07], %)
|
||||
|> line([3.79, 2.68], %, 'seg01')
|
||||
|> line([3.79, 2.68], %, $seg01)
|
||||
|> line([3.13, -2.4], %)`
|
||||
)
|
||||
})
|
||||
@ -4203,7 +4224,7 @@ const part002 = startSketchOn('XZ')
|
||||
await page.goto('/')
|
||||
await u.waitForAuthSkipAppStart()
|
||||
|
||||
await page.getByText("line([3.79, 2.68], %, 'seg01')").click()
|
||||
await page.getByText('line([3.79, 2.68], %, $seg01)').click()
|
||||
await page.getByRole('button', { name: 'Edit Sketch' }).click()
|
||||
|
||||
await page.waitForTimeout(100)
|
||||
@ -4438,7 +4459,7 @@ test.describe('Testing segment overlays', () => {
|
||||
|> angledLine({ angle: 3 + 0, length: 32 + 0 }, %)
|
||||
|> lineTo([5 + 33, 20 + 11.5 + 0], %)
|
||||
|> xLineTo(5 + 9 - 5, %)
|
||||
|> yLineTo(20 + -10.77, %, 'a')
|
||||
|> yLineTo(20 + -10.77, %, $a)
|
||||
|> xLine(26.04, %)
|
||||
|> yLine(21.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 181 + 0, length: 23.14 }, %)
|
||||
@ -4447,7 +4468,7 @@ test.describe('Testing segment overlays', () => {
|
||||
|> angledLineToY({ angle: 89, to: 20 + 9.14 + 0 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)
|
||||
|> tangentialArcTo([5 + 3.14 + 13, 20 + 3.14], %)
|
||||
@ -4606,7 +4627,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLine({ angle: angle001, length: len001 }, %)
|
||||
|> lineTo([33, yAbs001], %)
|
||||
|> xLineTo(xAbs002, %)
|
||||
|> yLineTo(-10.77, %, 'a')
|
||||
|> yLineTo(-10.77, %, $a)
|
||||
|> xLine(26.04, %)
|
||||
|> yLine(21.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 181 + 0, length: 23.14 }, %)
|
||||
@ -4643,9 +4664,9 @@ const part001 = startSketchOn('XZ')
|
||||
await clickUnconstrained({
|
||||
hoverPos: { x: yLineTo.x, y: yLineTo.y },
|
||||
constraintType: 'yAbsolute',
|
||||
expectBeforeUnconstrained: "yLineTo(-10.77, %, 'a')",
|
||||
expectAfterUnconstrained: "yLineTo(yAbs002, %, 'a')",
|
||||
expectFinal: "yLineTo(-10.77, %, 'a')",
|
||||
expectBeforeUnconstrained: 'yLineTo(-10.77, %, $a)',
|
||||
expectAfterUnconstrained: 'yLineTo(yAbs002, %, $a)',
|
||||
expectFinal: 'yLineTo(-10.77, %, $a)',
|
||||
ang: ang + 180,
|
||||
locator: '[data-overlay-toolbar-index="4"]',
|
||||
})
|
||||
@ -4676,7 +4697,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLine({ angle: 3 + 0, length: 32 + 0 }, %)
|
||||
|> lineTo([33, 11.5 + 0], %)
|
||||
|> xLineTo(9 - 5, %)
|
||||
|> yLineTo(-10.77, %, 'a')
|
||||
|> yLineTo(-10.77, %, $a)
|
||||
|> xLine(26.04, %)
|
||||
|> yLine(21.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 181 + 0, length: 23.14 }, %)
|
||||
@ -4685,7 +4706,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLineToY({ angle: 89, to: 9.14 + 0 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)
|
||||
|> tangentialArcTo([3.14 + 13, 3.14], %)
|
||||
@ -4804,7 +4825,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLine({ angle: 3 + 0, length: 32 + 0 }, %)
|
||||
|> lineTo([33, 11.5 + 0], %)
|
||||
|> xLineTo(9 - 5, %)
|
||||
|> yLineTo(-10.77, %, 'a')
|
||||
|> yLineTo(-10.77, %, $a)
|
||||
|> xLine(26.04, %)
|
||||
|> yLine(21.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 181 + 0, length: 23.14 }, %)
|
||||
@ -4813,7 +4834,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLineToY({ angle: 89, to: 9.14 + 0 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)
|
||||
|> tangentialArcTo([3.14 + 13, 1.14], %)
|
||||
@ -4908,18 +4929,18 @@ const part001 = startSketchOn('XZ')
|
||||
constraintType: 'angle',
|
||||
expectBeforeUnconstrained: `angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)`,
|
||||
expectAfterUnconstrained: `angledLineThatIntersects({
|
||||
angle: angle003,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)`,
|
||||
expectFinal: `angledLineThatIntersects({
|
||||
angle: -176,
|
||||
offset: 9,
|
||||
intersectTag: 'a'
|
||||
intersectTag: a
|
||||
}, %)`,
|
||||
ang: ang + 180,
|
||||
locator: '[data-overlay-toolbar-index="11"]',
|
||||
@ -4934,17 +4955,17 @@ const part001 = startSketchOn('XZ')
|
||||
expectBeforeUnconstrained: `angledLineThatIntersects({
|
||||
angle: -176,
|
||||
offset: 9,
|
||||
intersectTag: 'a'
|
||||
intersectTag: a
|
||||
}, %)`,
|
||||
expectAfterUnconstrained: `angledLineThatIntersects({
|
||||
angle: -176,
|
||||
offset: perpDist001,
|
||||
intersectTag: 'a'
|
||||
intersectTag: a
|
||||
}, %)`,
|
||||
expectFinal: `angledLineThatIntersects({
|
||||
angle: -176,
|
||||
offset: 9,
|
||||
intersectTag: 'a'
|
||||
intersectTag: a
|
||||
}, %)`,
|
||||
ang: ang + 180,
|
||||
locator: '[data-overlay-toolbar-index="11"]',
|
||||
@ -4960,7 +4981,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLine({ angle: 3 + 0, length: 32 + 0 }, %)
|
||||
|> lineTo([33, 11.5 + 0], %)
|
||||
|> xLineTo(9 - 5, %)
|
||||
|> yLineTo(-10.77, %, 'a')
|
||||
|> yLineTo(-10.77, %, $a)
|
||||
|> xLine(26.04, %)
|
||||
|> yLine(21.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 181 + 0, length: 23.14 }, %)
|
||||
@ -4969,7 +4990,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLineToY({ angle: 89, to: 9.14 + 0 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)
|
||||
|> tangentialArcTo([3.14 + 13, -3.14], %)
|
||||
@ -5073,7 +5094,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLine({ angle: 3 + 0, length: 32 + 0 }, %)
|
||||
|> lineTo([33, 11.5 + 0], %)
|
||||
|> xLineTo(9 - 5, %)
|
||||
|> yLineTo(-10.77, %, 'a')
|
||||
|> yLineTo(-10.77, %, $a)
|
||||
|> xLine(26.04, %)
|
||||
|> yLine(21.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 181 + 0, length: 23.14 }, %)
|
||||
@ -5082,7 +5103,7 @@ const part001 = startSketchOn('XZ')
|
||||
|> angledLineToY({ angle: 89, to: 9.14 + 0 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)
|
||||
|> tangentialArcTo([3.14 + 13, 1.14], %)
|
||||
@ -5129,7 +5150,7 @@ const part001 = startSketchOn('XZ')
|
||||
hoverPos: { x: segmentToDelete.x, y: segmentToDelete.y },
|
||||
codeToBeDeleted: `angledLineThatIntersects({
|
||||
angle: 4.14,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 9
|
||||
}, %)`,
|
||||
stdLibFnName: 'angledLineThatIntersects',
|
||||
@ -5204,7 +5225,7 @@ const part001 = startSketchOn('XZ')
|
||||
ang = await u.getAngle(`[data-overlay-index="${4}"]`)
|
||||
await deleteSegmentSequence({
|
||||
hoverPos: { x: segmentToDelete.x, y: segmentToDelete.y },
|
||||
codeToBeDeleted: "yLineTo(-10.77, %, 'a')",
|
||||
codeToBeDeleted: 'yLineTo(-10.77, %, $a)',
|
||||
stdLibFnName: 'yLineTo',
|
||||
ang: ang + 180,
|
||||
locator: '[data-overlay-toolbar-index="4"]',
|
||||
@ -5278,20 +5299,20 @@ const part001 = startSketchOn('XZ')
})
test.describe('Testing delete with dependent segments', () => {
const cases = [
"line([22, 2], %, 'seg01')",
"angledLine([5, 23.03], %, 'seg01')",
"xLine(23, %, 'seg01')",
"yLine(-8, %, 'seg01')",
"xLineTo(30, %, 'seg01')",
"yLineTo(-4, %, 'seg01')",
"angledLineOfXLength([3, 30], %, 'seg01')",
"angledLineOfXLength({ angle: 3, length: 30 }, %, 'seg01')",
"angledLineOfYLength([3, 1.5], %, 'seg01')",
"angledLineOfYLength({ angle: 3, length: 1.5 }, %, 'seg01')",
"angledLineToX([3, 30], %, 'seg01')",
"angledLineToX({ angle: 3, to: 30 }, %, 'seg01')",
"angledLineToY([3, 7], %, 'seg01')",
"angledLineToY({ angle: 3, to: 7 }, %, 'seg01')",
'line([22, 2], %, $seg01)',
'angledLine([5, 23.03], %, $seg01)',
'xLine(23, %, $seg01)',
'yLine(-8, %, $seg01)',
'xLineTo(30, %, $seg01)',
'yLineTo(-4, %, $seg01)',
'angledLineOfXLength([3, 30], %, $seg01)',
'angledLineOfXLength({ angle: 3, length: 30 }, %, $seg01)',
'angledLineOfYLength([3, 1.5], %, $seg01)',
'angledLineOfYLength({ angle: 3, length: 1.5 }, %, $seg01)',
'angledLineToX([3, 30], %, $seg01)',
'angledLineToX({ angle: 3, to: 30 }, %, $seg01)',
'angledLineToY([3, 7], %, $seg01)',
'angledLineToY({ angle: 3, to: 7 }, %, $seg01)',
]
for (const doesHaveTagOutsideSketch of [true, false]) {
for (const lineOfInterest of cases) {
@ -5307,8 +5328,8 @@ const part001 = startSketchOn('XZ')
|
||||
|> startProfileAt([5, 6], %)
|
||||
|> ${lineToBeDeleted}
|
||||
|> line([-10, -15], %)
|
||||
|> angledLine([-176, segLen('seg01', %)], %)
|
||||
${extraLine ? "const myVar = segLen('seg01', part001)" : ''}`
|
||||
|> angledLine([-176, segLen(seg01, %)], %)
|
||||
${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
||||
)
|
||||
},
|
||||
{
|
||||
@ -5394,61 +5415,61 @@ ${extraLine ? "const myVar = segLen('seg01', part001)" : ''}`
|
||||
test.describe('Testing remove constraints segments', () => {
|
||||
const cases = [
|
||||
{
|
||||
before: `line([22 + 0, 2 + 0], %, 'seg01')`,
|
||||
after: `line([22, 2], %, 'seg01')`,
|
||||
before: `line([22 + 0, 2 + 0], %, $seg01)`,
|
||||
after: `line([22, 2], %, $seg01)`,
|
||||
},
|
||||
|
||||
{
|
||||
before: `angledLine([5 + 0, 23.03 + 0], %, 'seg01')`,
|
||||
after: `line([22.94, 2.01], %, 'seg01')`,
|
||||
before: `angledLine([5 + 0, 23.03 + 0], %, $seg01)`,
|
||||
after: `line([22.94, 2.01], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `xLine(23 + 0, %, 'seg01')`,
|
||||
after: `line([23, 0], %, 'seg01')`,
|
||||
before: `xLine(23 + 0, %, $seg01)`,
|
||||
after: `line([23, 0], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `yLine(-8 + 0, %, 'seg01')`,
|
||||
after: `line([0, -8], %, 'seg01')`,
|
||||
before: `yLine(-8 + 0, %, $seg01)`,
|
||||
after: `line([0, -8], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `xLineTo(30 + 0, %, 'seg01')`,
|
||||
after: `line([25, 0], %, 'seg01')`,
|
||||
before: `xLineTo(30 + 0, %, $seg01)`,
|
||||
after: `line([25, 0], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `yLineTo(-4 + 0, %, 'seg01')`,
|
||||
after: `line([0, -10], %, 'seg01')`,
|
||||
before: `yLineTo(-4 + 0, %, $seg01)`,
|
||||
after: `line([0, -10], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineOfXLength([3 + 0, 30 + 0], %, 'seg01')`,
|
||||
after: `line([30, 1.57], %, 'seg01')`,
|
||||
before: `angledLineOfXLength([3 + 0, 30 + 0], %, $seg01)`,
|
||||
after: `line([30, 1.57], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineOfYLength([3 + 0, 1.5 + 0], %, 'seg01')`,
|
||||
after: `line([28.62, 1.5], %, 'seg01')`,
|
||||
before: `angledLineOfYLength([3 + 0, 1.5 + 0], %, $seg01)`,
|
||||
after: `line([28.62, 1.5], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineToX([3 + 0, 30 + 0], %, 'seg01')`,
|
||||
after: `line([25, 1.31], %, 'seg01')`,
|
||||
before: `angledLineToX([3 + 0, 30 + 0], %, $seg01)`,
|
||||
after: `line([25, 1.31], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineToY([3 + 0, 7 + 0], %, 'seg01')`,
|
||||
after: `line([19.08, 1], %, 'seg01')`,
|
||||
before: `angledLineToY([3 + 0, 7 + 0], %, $seg01)`,
|
||||
after: `line([19.08, 1], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineOfXLength({ angle: 3 + 0, length: 30 + 0 }, %, 'seg01')`,
|
||||
after: `line([30, 1.57], %, 'seg01')`,
|
||||
before: `angledLineOfXLength({ angle: 3 + 0, length: 30 + 0 }, %, $seg01)`,
|
||||
after: `line([30, 1.57], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineOfYLength({ angle: 3 + 0, length: 1.5 + 0 }, %, 'seg01')`,
|
||||
after: `line([28.62, 1.5], %, 'seg01')`,
|
||||
before: `angledLineOfYLength({ angle: 3 + 0, length: 1.5 + 0 }, %, $seg01)`,
|
||||
after: `line([28.62, 1.5], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineToX({ angle: 3 + 0, to: 30 + 0 }, %, 'seg01')`,
|
||||
after: `line([25, 1.31], %, 'seg01')`,
|
||||
before: `angledLineToX({ angle: 3 + 0, to: 30 + 0 }, %, $seg01)`,
|
||||
after: `line([25, 1.31], %, $seg01)`,
|
||||
},
|
||||
{
|
||||
before: `angledLineToY({ angle: 3 + 0, to: 7 + 0 }, %, 'seg01')`,
|
||||
after: `line([19.08, 1], %, 'seg01')`,
|
||||
before: `angledLineToY({ angle: 3 + 0, to: 7 + 0 }, %, $seg01)`,
|
||||
after: `line([19.08, 1], %, $seg01)`,
|
||||
},
|
||||
]
|
||||
|
||||
@ -5465,7 +5486,7 @@ ${extraLine ? "const myVar = segLen('seg01', part001)" : ''}`
|
||||
|> startProfileAt([5, 6], %)
|
||||
|> ${lineToBeDeleted}
|
||||
|> line([-10, -15], %)
|
||||
|> angledLine([-176, segLen('seg01', %)], %)`
|
||||
|> angledLine([-176, segLen(seg01, %)], %)`
|
||||
)
|
||||
},
|
||||
{
|
||||
@ -6143,27 +6164,27 @@ const part001 = startSketchOn('-XZ')
|
||||
|> angledLineToY({
|
||||
angle: topAng,
|
||||
to: totalHeightHalf,
|
||||
}, %, 'seg04')
|
||||
|> xLineTo(totalLen, %, 'seg03')
|
||||
|> yLine(-armThick, %, 'seg01')
|
||||
}, %, $seg04)
|
||||
|> xLineTo(totalLen, %, $seg03)
|
||||
|> yLine(-armThick, %, $seg01)
|
||||
|> angledLineThatIntersects({
|
||||
angle: HALF_TURN,
|
||||
offset: -armThick,
|
||||
intersectTag: 'seg04'
|
||||
intersectTag: seg04
|
||||
}, %)
|
||||
|> angledLineToY([segAng('seg04', %) + 180, ZERO], %)
|
||||
|> angledLineToY([segAng(seg04, %) + 180, ZERO], %)
|
||||
|> angledLineToY({
|
||||
angle: -bottomAng,
|
||||
to: -totalHeightHalf - armThick,
|
||||
}, %, 'seg02')
|
||||
|> xLineTo(segEndX('seg03', %) + 0, %)
|
||||
|> yLine(-segLen('seg01', %), %)
|
||||
}, %, $seg02)
|
||||
|> xLineTo(segEndX(seg03, %) + 0, %)
|
||||
|> yLine(-segLen(seg01, %), %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: HALF_TURN,
|
||||
offset: -armThick,
|
||||
intersectTag: 'seg02'
|
||||
intersectTag: seg02
|
||||
}, %)
|
||||
|> angledLineToY([segAng('seg02', %) + 180, -baseHeight], %)
|
||||
|> angledLineToY([segAng(seg02, %) + 180, -baseHeight], %)
|
||||
|> xLineTo(ZERO, %)
|
||||
|> close(%)
|
||||
|> extrude(4, %)`
|
||||
|
Before Width: | Height: | Size: 42 KiB After Width: | Height: | Size: 42 KiB |
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 44 KiB |
Before Width: | Height: | Size: 66 KiB After Width: | Height: | Size: 69 KiB |
Before Width: | Height: | Size: 71 KiB After Width: | Height: | Size: 42 KiB |
Before Width: | Height: | Size: 75 KiB After Width: | Height: | Size: 60 KiB |
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 28 KiB |
Before Width: | Height: | Size: 46 KiB After Width: | Height: | Size: 30 KiB |
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 30 KiB |
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 29 KiB |
Before Width: | Height: | Size: 47 KiB After Width: | Height: | Size: 33 KiB |
Before Width: | Height: | Size: 43 KiB After Width: | Height: | Size: 27 KiB |
@ -12,7 +12,6 @@
"@headlessui/tailwindcss": "^0.2.0",
"@kittycad/lib": "^0.0.67",
"@lezer/javascript": "^1.4.9",
"@open-rpc/client-js": "^1.8.1",
"@react-hook/resize-observer": "^2.0.1",
"@replit/codemirror-interact": "^6.3.1",
"@tauri-apps/api": "2.0.0-beta.12",
@ -42,7 +41,7 @@
"fuse.js": "^7.0.0",
"html2canvas-pro": "^1.4.3",
"http-server": "^14.1.1",
"json-rpc-2.0": "^1.6.0",
"json-rpc-2.0": "^1.7.0",
"jszip": "^3.10.1",
"node-fetch": "^3.3.2",
"re-resizable": "^6.9.11",
@ -62,7 +61,8 @@
"ua-parser-js": "^1.0.37",
"uuid": "^9.0.1",
"vitest": "^1.6.0",
"vscode-jsonrpc": "^8.2.1",
"vscode-languageclient": "^9.0.1",
"vscode-languageserver": "^9.0.1",
"vscode-languageserver-protocol": "^3.17.5",
"wasm-pack": "^0.12.1",
"web-vitals": "^3.5.2",
|
@ -12,13 +12,13 @@ import { defineConfig, devices } from '@playwright/test'
export default defineConfig({
testDir: './e2e/playwright',
/* Run tests in files in parallel */
fullyParallel: false,
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: !!process.env.CI,
/* Retry on CI only */
retries: process.env.CI ? 3 : 0,
/* Different amount of parallelism on CI and local. */
workers: process.env.CI ? 1 : 4,
workers: process.env.CI ? 4 : 4,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: 'html',
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
|
BIN
public/cat.jpg
Normal file
After Width: | Height: | Size: 193 KiB |
31
src-tauri/Cargo.lock
generated
@ -405,12 +405,6 @@ dependencies = [
"system-deps",
]

[[package]]
name = "atomic"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"

[[package]]
name = "atomic-waker"
version = "1.1.2"
@ -1146,6 +1140,20 @@ dependencies = [
"parking_lot_core 0.9.9",
]

[[package]]
name = "dashmap"
version = "6.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28"
dependencies = [
"cfg-if",
"crossbeam-utils",
"hashbrown 0.14.3",
"lock_api",
"once_cell",
"parking_lot_core 0.9.9",
]

[[package]]
name = "data-encoding"
version = "2.5.0"
@ -2568,7 +2576,7 @@ dependencies = [

[[package]]
name = "kcl-lib"
version = "0.1.65"
version = "0.1.67"
dependencies = [
"anyhow",
"approx",
@ -2578,7 +2586,7 @@ dependencies = [
"bson",
"chrono",
"clap",
"dashmap",
"dashmap 6.0.1",
"databake",
"derive-docs",
"form_urlencoded",
@ -5866,7 +5874,7 @@ dependencies = [
"async-trait",
"auto_impl",
"bytes",
"dashmap",
"dashmap 5.5.3",
"futures",
"httparse",
"lsp-types",
@ -6216,11 +6224,10 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"

[[package]]
name = "uuid"
version = "1.8.0"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0"
checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439"
dependencies = [
"atomic",
"getrandom 0.2.14",
"serde",
"wasm-bindgen",
|
@ -1783,6 +1783,31 @@ function prepareTruncatedMemoryAndAst(
const programMemoryOverride = programMemoryInit()
if (err(programMemoryOverride)) return programMemoryOverride

// Grab all the TagDeclarators and TagIdentifiers from memory.
let start = _node.node.start
for (const key in programMemory.root) {
const value = programMemory.root[key]
if (!('__meta' in value)) {
continue
}
if (
value.__meta === undefined ||
value.__meta.length === 0 ||
value.__meta[0].sourceRange === undefined
) {
continue
}

if (value.__meta[0].sourceRange[0] >= start) {
// We only want things before our start point.
continue
}

if (value.type === 'TagIdentifier') {
programMemoryOverride.root[key] = JSON.parse(JSON.stringify(value))
}
}

for (let i = 0; i < bodyIndex; i++) {
const node = _ast.body[i]
if (node.type !== 'VariableDeclaration') {
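The block above copies tag state forward when the AST is truncated for partial execution. A minimal standalone sketch of that filtering rule, with simplified stand-in types (the real ProgramMemory shapes come from the KCL wasm bindings and are an assumption here):

// Hypothetical, simplified shapes for illustration only.
interface MemoryItem {
  type: string
  __meta?: { sourceRange?: [number, number] }[]
}

// Keep only TagIdentifier entries whose first source range starts before `start`.
function collectTagsBefore(
  root: Record<string, MemoryItem>,
  start: number
): Record<string, MemoryItem> {
  const override: Record<string, MemoryItem> = {}
  for (const key in root) {
    const value = root[key]
    const range = value.__meta?.[0]?.sourceRange
    if (!range) continue
    if (range[0] >= start) continue // only things before our start point
    if (value.type === 'TagIdentifier') {
      override[key] = JSON.parse(JSON.stringify(value)) // deep copy into the override memory
    }
  }
  return override
}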
|
@ -7,8 +7,7 @@ import React, {
useContext,
useState,
} from 'react'
import { FromServer, IntoServer } from 'editor/plugins/lsp/codec'
import Client from '../editor/plugins/lsp/client'
import LspServerClient from '../editor/plugins/lsp/client'
import { TEST, VITE_KC_API_BASE_URL } from 'env'
import kclLanguage from 'editor/plugins/lsp/kcl/language'
import { copilotPlugin } from 'editor/plugins/lsp/copilot'
@ -19,9 +18,7 @@ import { LanguageSupport } from '@codemirror/language'
import { useNavigate } from 'react-router-dom'
import { paths } from 'lib/paths'
import { FileEntry } from 'lib/types'
import Worker from 'editor/plugins/lsp/worker.ts?worker'
import {
LspWorkerEventType,
KclWorkerOptions,
CopilotWorkerOptions,
LspWorker,
@ -30,7 +27,6 @@ import { wasmUrl } from 'lang/wasm'
import { PROJECT_ENTRYPOINT } from 'lib/constants'
import { useNetworkContext } from 'hooks/useNetworkContext'
import { NetworkHealthState } from 'hooks/useNetworkStatus'
import { err, trap } from 'lib/trap'

function getWorkspaceFolders(): LSP.WorkspaceFolder[] {
return []
@ -107,32 +103,23 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
return { lspClient: null }
}

const lspWorker = new Worker({ name: 'kcl' })
const initEvent: KclWorkerOptions = {
wasmUrl: wasmUrl(),
const options: KclWorkerOptions = {
token: token,
baseUnit: defaultUnit.current,
apiBaseUrl: VITE_KC_API_BASE_URL,
callback: () => {
setIsLspReady(true)
},
wasmUrl: wasmUrl(),
}
lspWorker.postMessage({
worker: LspWorker.Kcl,
eventType: LspWorkerEventType.Init,
eventData: initEvent,

const lsp = new LspServerClient({ worker: LspWorker.Kcl, options })
lsp.startServer()

const lspClient = new LanguageServerClient({
client: lsp,
name: LspWorker.Kcl,
})
lspWorker.onmessage = function (e) {
if (err(fromServer)) return
fromServer.add(e.data)
}

const intoServer: IntoServer = new IntoServer(LspWorker.Kcl, lspWorker)
const fromServer: FromServer | Error = FromServer.create()
if (err(fromServer)) return { lspClient: null }

const client = new Client(fromServer, intoServer)

setIsLspReady(true)

const lspClient = new LanguageServerClient({ client, name: LspWorker.Kcl })
return { lspClient }
}, [
// We need a token for authenticating the server.
@ -185,32 +172,19 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
return { lspClient: null }
}

const lspWorker = new Worker({ name: 'copilot' })
const initEvent: CopilotWorkerOptions = {
wasmUrl: wasmUrl(),
const options: CopilotWorkerOptions = {
token: token,
apiBaseUrl: VITE_KC_API_BASE_URL,
callback: () => {
setIsCopilotReady(true)
},
wasmUrl: wasmUrl(),
}
lspWorker.postMessage({
worker: LspWorker.Copilot,
eventType: LspWorkerEventType.Init,
eventData: initEvent,
})
lspWorker.onmessage = function (e) {
if (err(fromServer)) return
fromServer.add(e.data)
}

const intoServer: IntoServer = new IntoServer(LspWorker.Copilot, lspWorker)
const fromServer: FromServer | Error = FromServer.create()
if (err(fromServer)) return { lspClient: null }

const client = new Client(fromServer, intoServer)

setIsCopilotReady(true)
const lsp = new LspServerClient({ worker: LspWorker.Copilot, options })
lsp.startServer()

const lspClient = new LanguageServerClient({
client,
client: lsp,
name: LspWorker.Copilot,
})
return { lspClient }
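Both providers now follow the same pattern: build the worker options, hand them to LspServerClient (which owns the web worker and posts the Init event), then wrap that in LanguageServerClient. A condensed sketch of the wiring, where the module paths and the KclWorkerOptions shape are assumptions read off the hunks above rather than confirmed exports:

import LspServerClient from 'editor/plugins/lsp/client'
import { LanguageServerClient } from 'editor/plugins/lsp'
import { LspWorker, KclWorkerOptions } from 'editor/plugins/lsp/types'

// Assumed shapes/paths; mirrors the KCL branch of the provider above.
function makeKclLspClient(options: KclWorkerOptions): LanguageServerClient {
  const lsp = new LspServerClient({ worker: LspWorker.Kcl, options })
  lsp.startServer() // spins up the worker and posts the Init event to it
  return new LanguageServerClient({ client: lsp, name: LspWorker.Kcl })
}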
|
@ -127,6 +127,7 @@ export const Stream = ({ className = '' }: { className?: string }) => {
return (
<div
className="absolute inset-0 z-0"
id="stream"
data-testid="stream"
onMouseUp={handleMouseUp}
onMouseDown={handleMouseDown}
|
@ -20,6 +20,12 @@ const UserSidebarMenu = ({ user }: { user?: User }) => {
const navigate = useNavigate()
const send = useSettingsAuthContext()?.auth?.send

// This image host goes down sometimes. We will instead rewrite the
// resource to be a local one.
if (user?.image === 'https://placekitten.com/200/200') {
user.image = '/cat.jpg'
}

// Fallback logic for displaying user's "name":
// 1. user.name
// 2. user.first_name + ' ' + user.last_name
|
@ -1,197 +1,54 @@
|
||||
import * as jsrpc from 'json-rpc-2.0'
|
||||
import * as LSP from 'vscode-languageserver-protocol'
|
||||
|
||||
import { LspContext, LspWorkerEventType } from './types'
|
||||
import {
|
||||
registerServerCapability,
|
||||
unregisterServerCapability,
|
||||
} from './server-capability-registration'
|
||||
import { Codec, FromServer, IntoServer } from './codec'
|
||||
import { err } from 'lib/trap'
|
||||
LanguageClient,
|
||||
LanguageClientOptions,
|
||||
} from 'vscode-languageclient/browser'
|
||||
import Worker from 'editor/plugins/lsp/worker.ts?worker'
|
||||
|
||||
const client_capabilities: LSP.ClientCapabilities = {
|
||||
textDocument: {
|
||||
hover: {
|
||||
dynamicRegistration: true,
|
||||
contentFormat: ['plaintext', 'markdown'],
|
||||
},
|
||||
moniker: {},
|
||||
synchronization: {
|
||||
dynamicRegistration: true,
|
||||
willSave: false,
|
||||
didSave: false,
|
||||
willSaveWaitUntil: false,
|
||||
},
|
||||
completion: {
|
||||
dynamicRegistration: true,
|
||||
completionItem: {
|
||||
snippetSupport: false,
|
||||
commitCharactersSupport: true,
|
||||
documentationFormat: ['plaintext', 'markdown'],
|
||||
deprecatedSupport: false,
|
||||
preselectSupport: false,
|
||||
},
|
||||
contextSupport: false,
|
||||
},
|
||||
signatureHelp: {
|
||||
dynamicRegistration: true,
|
||||
signatureInformation: {
|
||||
documentationFormat: ['plaintext', 'markdown'],
|
||||
},
|
||||
},
|
||||
declaration: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
definition: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
typeDefinition: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
implementation: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
},
|
||||
workspace: {
|
||||
didChangeConfiguration: {
|
||||
dynamicRegistration: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
export default class LspServerClient {
|
||||
context: LspContext
|
||||
client: LanguageClient | null = null
|
||||
worker: Worker | null = null
|
||||
|
||||
export default class Client extends jsrpc.JSONRPCServerAndClient {
|
||||
afterInitializedHooks: (() => Promise<void>)[] = []
|
||||
#fromServer: FromServer
|
||||
private serverCapabilities: LSP.ServerCapabilities<any> = {}
|
||||
private notifyFn: ((message: LSP.NotificationMessage) => void) | null = null
|
||||
|
||||
constructor(fromServer: FromServer, intoServer: IntoServer) {
|
||||
super(
|
||||
new jsrpc.JSONRPCServer(),
|
||||
new jsrpc.JSONRPCClient(async (json: jsrpc.JSONRPCRequest) => {
|
||||
const encoded = Codec.encode(json)
|
||||
intoServer.enqueue(encoded)
|
||||
if (null != json.id) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const response = await fromServer.responses.get(json.id)
|
||||
this.client.receive(response as jsrpc.JSONRPCResponse)
|
||||
}
|
||||
})
|
||||
)
|
||||
this.#fromServer = fromServer
|
||||
constructor(context: LspContext) {
|
||||
this.context = context
|
||||
}
|
||||
|
||||
async start(): Promise<void> {
|
||||
// process "window/logMessage": client <- server
|
||||
this.addMethod(LSP.LogMessageNotification.type.method, (params) => {
|
||||
const { type, message } = params as {
|
||||
type: LSP.MessageType
|
||||
message: string
|
||||
}
|
||||
let messageString = ''
|
||||
switch (type) {
|
||||
case LSP.MessageType.Error: {
|
||||
messageString += '[error] '
|
||||
break
|
||||
}
|
||||
case LSP.MessageType.Warning: {
|
||||
messageString += ' [warn] '
|
||||
break
|
||||
}
|
||||
case LSP.MessageType.Info: {
|
||||
messageString += ' [info] '
|
||||
break
|
||||
}
|
||||
case LSP.MessageType.Log: {
|
||||
messageString += ' [log] '
|
||||
break
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
messageString += message
|
||||
async startServer() {
|
||||
this.worker = new Worker({ name: this.context.worker })
|
||||
this.worker.postMessage({
|
||||
worker: this.context.worker,
|
||||
eventType: LspWorkerEventType.Init,
|
||||
eventData: this.context.options,
|
||||
})
|
||||
}
|
||||
|
||||
async startClient() {
|
||||
const clientOptions: LanguageClientOptions = {
|
||||
documentSelector: [{ language: 'kcl' }],
|
||||
diagnosticCollectionName: 'markers',
|
||||
}
|
||||
|
||||
if (!this.worker) {
|
||||
console.error('Worker not initialized')
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
// process "client/registerCapability": client <- server
|
||||
this.addMethod(LSP.RegistrationRequest.type.method, (params) => {
|
||||
// Register a server capability.
|
||||
params.registrations.forEach(
|
||||
(capabilityRegistration: LSP.Registration) => {
|
||||
const caps = registerServerCapability(
|
||||
this.serverCapabilities,
|
||||
capabilityRegistration
|
||||
)
|
||||
if (err(caps)) return (this.serverCapabilities = {})
|
||||
this.serverCapabilities = caps
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
// process "client/unregisterCapability": client <- server
|
||||
this.addMethod(LSP.UnregistrationRequest.type.method, (params) => {
|
||||
// Unregister a server capability.
|
||||
params.unregisterations.forEach(
|
||||
(capabilityUnregistration: LSP.Unregistration) => {
|
||||
const caps = unregisterServerCapability(
|
||||
this.serverCapabilities,
|
||||
capabilityUnregistration
|
||||
)
|
||||
if (err(caps)) return (this.serverCapabilities = {})
|
||||
this.serverCapabilities = caps
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
// request "initialize": client <-> server
|
||||
const { capabilities } = await this.request(
|
||||
LSP.InitializeRequest.type.method,
|
||||
{
|
||||
processId: null,
|
||||
clientInfo: {
|
||||
name: 'kcl-language-client',
|
||||
},
|
||||
capabilities: client_capabilities,
|
||||
rootUri: null,
|
||||
} as LSP.InitializeParams
|
||||
this.client = new LanguageClient(
|
||||
this.context.worker + 'LspClient',
|
||||
this.context.worker + ' LSP Client',
|
||||
clientOptions,
|
||||
this.worker
|
||||
)
|
||||
|
||||
this.serverCapabilities = capabilities
|
||||
|
||||
// notify "initialized": client --> server
|
||||
this.notify(LSP.InitializedNotification.type.method, {})
|
||||
|
||||
await Promise.all(
|
||||
this.afterInitializedHooks.map((f: () => Promise<void>) => f())
|
||||
)
|
||||
await Promise.all([this.processNotifications(), this.processRequests()])
|
||||
}
|
||||
|
||||
getServerCapabilities(): LSP.ServerCapabilities<any> {
|
||||
return this.serverCapabilities
|
||||
}
|
||||
|
||||
setNotifyFn(fn: (message: LSP.NotificationMessage) => void): void {
|
||||
this.notifyFn = fn
|
||||
}
|
||||
|
||||
async processNotifications(): Promise<void> {
|
||||
for await (const notification of this.#fromServer.notifications) {
|
||||
if (this.notifyFn) {
|
||||
this.notifyFn(notification)
|
||||
}
|
||||
try {
|
||||
await this.client.start()
|
||||
} catch (error) {
|
||||
this.client.error(`Start failed`, error, 'force')
|
||||
}
|
||||
}
|
||||
|
||||
async processRequests(): Promise<void> {
|
||||
for await (const request of this.#fromServer.requests) {
|
||||
await this.receiveAndSend(request)
|
||||
}
|
||||
}
|
||||
|
||||
pushAfterInitializeHook(...hooks: (() => Promise<void>)[]): void {
|
||||
this.afterInitializedHooks.push(...hooks)
|
||||
deactivate() {
|
||||
return this.client?.stop()
|
||||
}
|
||||
}
|
||||
|
@ -1,79 +0,0 @@
|
||||
import * as jsrpc from 'json-rpc-2.0'
|
||||
import * as vsrpc from 'vscode-jsonrpc'
|
||||
|
||||
import Bytes from './codec/bytes'
|
||||
import StreamDemuxer from './codec/demuxer'
|
||||
import Headers from './codec/headers'
|
||||
import Queue from './codec/queue'
|
||||
import Tracer from './tracer'
|
||||
import { LspWorkerEventType, LspWorker } from './types'
|
||||
|
||||
export const encoder = new TextEncoder()
|
||||
export const decoder = new TextDecoder()
|
||||
|
||||
export class Codec {
|
||||
static encode(
|
||||
json: jsrpc.JSONRPCRequest | jsrpc.JSONRPCResponse
|
||||
): Uint8Array {
|
||||
const message = JSON.stringify(json)
|
||||
const delimited = Headers.add(message)
|
||||
return Bytes.encode(delimited)
|
||||
}
|
||||
|
||||
static decode<T>(data: Uint8Array): T {
|
||||
const delimited = Bytes.decode(data)
|
||||
const message = Headers.remove(delimited)
|
||||
return JSON.parse(message) as T
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: tracing efficiency
|
||||
export class IntoServer
|
||||
extends Queue<Uint8Array>
|
||||
implements AsyncGenerator<Uint8Array, never, void>
|
||||
{
|
||||
private worker: Worker | null = null
|
||||
private type_: LspWorker | null = null
|
||||
constructor(type_?: LspWorker, worker?: Worker) {
|
||||
super()
|
||||
if (worker && type_) {
|
||||
this.worker = worker
|
||||
this.type_ = type_
|
||||
}
|
||||
}
|
||||
enqueue(item: Uint8Array): void {
|
||||
Tracer.client(Headers.remove(decoder.decode(item)))
|
||||
if (this.worker) {
|
||||
this.worker.postMessage({
|
||||
worker: this.type_,
|
||||
eventType: LspWorkerEventType.Call,
|
||||
eventData: item,
|
||||
})
|
||||
} else {
|
||||
super.enqueue(item)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface FromServer extends WritableStream<Uint8Array> {
|
||||
readonly responses: {
|
||||
get(key: number | string): null | Promise<vsrpc.ResponseMessage>
|
||||
}
|
||||
readonly notifications: AsyncGenerator<vsrpc.NotificationMessage, never, void>
|
||||
readonly requests: AsyncGenerator<vsrpc.RequestMessage, never, void>
|
||||
|
||||
add(item: Uint8Array): void
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace FromServer {
|
||||
export function create(): FromServer | Error {
|
||||
// Calls private method .start() which can throw.
|
||||
// This is an odd one of the bunch but try/catch seems most suitable here.
|
||||
try {
|
||||
return new StreamDemuxer()
|
||||
} catch (e: any) {
|
||||
return e
|
||||
}
|
||||
}
|
||||
}
|
@ -1,27 +0,0 @@
|
||||
import { encoder, decoder } from '../codec'
|
||||
|
||||
export default class Bytes {
|
||||
static encode(input: string): Uint8Array {
|
||||
return encoder.encode(input)
|
||||
}
|
||||
|
||||
static decode(input: Uint8Array): string {
|
||||
return decoder.decode(input)
|
||||
}
|
||||
|
||||
static append<
|
||||
T extends { length: number; set(arr: T, offset: number): void }
|
||||
>(constructor: { new (length: number): T }, ...arrays: T[]) {
|
||||
let totalLength = 0
|
||||
for (const arr of arrays) {
|
||||
totalLength += arr.length
|
||||
}
|
||||
const result = new constructor(totalLength)
|
||||
let offset = 0
|
||||
for (const arr of arrays) {
|
||||
result.set(arr, offset)
|
||||
offset += arr.length
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
@ -1,101 +0,0 @@
|
||||
import * as vsrpc from 'vscode-jsonrpc'
|
||||
|
||||
import Bytes from './bytes'
|
||||
import PromiseMap from './map'
|
||||
import Queue from './queue'
|
||||
import Tracer from '../tracer'
|
||||
import { Codec } from '../codec'
|
||||
|
||||
export default class StreamDemuxer extends Queue<Uint8Array> {
|
||||
readonly responses: PromiseMap<number | string, vsrpc.ResponseMessage> =
|
||||
new PromiseMap()
|
||||
readonly notifications: Queue<vsrpc.NotificationMessage> =
|
||||
new Queue<vsrpc.NotificationMessage>()
|
||||
readonly requests: Queue<vsrpc.RequestMessage> =
|
||||
new Queue<vsrpc.RequestMessage>()
|
||||
|
||||
readonly #start: Promise<void>
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.#start = this.start()
|
||||
}
|
||||
|
||||
private async start(): Promise<void> {
|
||||
let contentLength: null | number = null
|
||||
let buffer = new Uint8Array()
|
||||
|
||||
for await (const bytes of this) {
|
||||
buffer = Bytes.append(Uint8Array, buffer, bytes)
|
||||
while (buffer.length > 0) {
|
||||
// check if the content length is known
|
||||
if (null == contentLength) {
|
||||
// if not, try to match the prefixed headers
|
||||
const match = Bytes.decode(buffer).match(
|
||||
/^Content-Length:\s*(\d+)\s*/
|
||||
)
|
||||
if (null == match) continue
|
||||
|
||||
// try to parse the content-length from the headers
|
||||
const length = parseInt(match[1])
|
||||
|
||||
if (isNaN(length))
|
||||
return Promise.reject(new Error('invalid content length'))
|
||||
|
||||
// slice the headers since we now have the content length
|
||||
buffer = buffer.slice(match[0].length)
|
||||
|
||||
// set the content length
|
||||
contentLength = length
|
||||
}
|
||||
|
||||
// if the buffer doesn't contain a full message; await another iteration
|
||||
if (buffer.length < contentLength) continue
|
||||
|
||||
// Get just the slice of the buffer that is our content length.
|
||||
const slice = buffer.slice(0, contentLength)
|
||||
|
||||
// decode buffer to a string
|
||||
const delimited = Bytes.decode(slice)
|
||||
|
||||
// reset the buffer
|
||||
buffer = buffer.slice(contentLength)
|
||||
// reset the contentLength
|
||||
contentLength = null
|
||||
|
||||
const message = JSON.parse(delimited) as vsrpc.Message
|
||||
Tracer.server(message)
|
||||
|
||||
// demux the message stream
|
||||
if (vsrpc.Message.isResponse(message) && null != message.id) {
|
||||
this.responses.set(message.id, message)
|
||||
continue
|
||||
}
|
||||
if (vsrpc.Message.isNotification(message)) {
|
||||
this.notifications.enqueue(message)
|
||||
continue
|
||||
}
|
||||
if (vsrpc.Message.isRequest(message)) {
|
||||
this.requests.enqueue(message)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
add(bytes: Uint8Array): void {
|
||||
const message = Codec.decode(bytes) as vsrpc.Message
|
||||
Tracer.server(message)
|
||||
|
||||
// demux the message stream
|
||||
if (vsrpc.Message.isResponse(message) && null != message.id) {
|
||||
this.responses.set(message.id, message)
|
||||
}
|
||||
if (vsrpc.Message.isNotification(message)) {
|
||||
this.notifications.enqueue(message)
|
||||
}
|
||||
if (vsrpc.Message.isRequest(message)) {
|
||||
this.requests.enqueue(message)
|
||||
}
|
||||
}
|
||||
}
|
@ -1,9 +0,0 @@
export default class Headers {
static add(message: string): string {
return `Content-Length: ${message.length}\r\n\r\n${message}`
}

static remove(delimited: string): string {
return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
}
}
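These deleted helpers implemented the LSP base-protocol framing the old worker codec relied on. A small round trip, assuming Headers as defined in the removed file above:

const body = JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'initialize', params: {} })
const framed = Headers.add(body) // "Content-Length: <n>\r\n\r\n{...}"
const restored = Headers.remove(framed)
console.log(restored === body) // true

Note that message.length counts UTF-16 code units, so the header is only byte-accurate for ASCII payloads; byte-exact framing would measure the encoded bytes with TextEncoder.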
|
@ -1,72 +0,0 @@
|
||||
export default class PromiseMap<K, V extends { toString(): string }> {
|
||||
#map: Map<K, PromiseMap.Entry<V>> = new Map()
|
||||
|
||||
get(key: K & { toString(): string }): null | Promise<V> {
|
||||
let initialized: PromiseMap.Entry<V>
|
||||
// if the entry doesn't exist, set it
|
||||
if (!this.#map.has(key)) {
|
||||
initialized = this.#set(key)
|
||||
} else {
|
||||
// otherwise return the entry
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
initialized = this.#map.get(key)!
|
||||
}
|
||||
// if the entry is a pending promise, return it
|
||||
if (initialized.status === 'pending') {
|
||||
return initialized.promise
|
||||
} else {
|
||||
// otherwise return null
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
#set(key: K, value?: V): PromiseMap.Entry<V> {
|
||||
if (this.#map.has(key)) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
return this.#map.get(key)!
|
||||
}
|
||||
// placeholder resolver for entry
|
||||
let resolve = (item: V) => {
|
||||
void item
|
||||
}
|
||||
// promise for entry (which assigns the resolver
|
||||
const promise = new Promise<V>((resolver) => {
|
||||
resolve = resolver
|
||||
})
|
||||
// the initialized entry
|
||||
const initialized: PromiseMap.Entry<V> = {
|
||||
status: 'pending',
|
||||
resolve,
|
||||
promise,
|
||||
}
|
||||
if (null != value) {
|
||||
initialized.resolve(value)
|
||||
}
|
||||
// set the entry
|
||||
this.#map.set(key, initialized)
|
||||
return initialized
|
||||
}
|
||||
|
||||
set(key: K & { toString(): string }, value: V): this {
|
||||
const initialized = this.#set(key, value)
|
||||
// if the promise is pending ...
|
||||
if (initialized.status === 'pending') {
|
||||
// ... set the entry status to resolved to free the promise
|
||||
this.#map.set(key, { status: 'resolved' })
|
||||
// ... and resolve the promise with the given value
|
||||
initialized.resolve(value)
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this.#map.size
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace PromiseMap {
|
||||
export type Entry<V> =
|
||||
| { status: 'pending'; resolve: (item: V) => void; promise: Promise<V> }
|
||||
| { status: 'resolved' }
|
||||
}
|
@ -1,6 +1,5 @@
|
||||
import type * as LSP from 'vscode-languageserver-protocol'
|
||||
import Client from './client'
|
||||
import { SemanticToken, deserializeTokens } from './kcl/semantic_tokens'
|
||||
import LspServerClient from './client'
|
||||
import { LanguageServerPlugin } from 'editor/plugins/lsp/plugin'
|
||||
import { CopilotLspCompletionParams } from 'wasm-lib/kcl/bindings/CopilotLspCompletionParams'
|
||||
import { CopilotCompletionResponse } from 'wasm-lib/kcl/bindings/CopilotCompletionResponse'
|
||||
@ -10,7 +9,7 @@ import { UpdateUnitsParams } from 'wasm-lib/kcl/bindings/UpdateUnitsParams'
|
||||
import { UpdateCanExecuteParams } from 'wasm-lib/kcl/bindings/UpdateCanExecuteParams'
|
||||
import { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'
|
||||
import { UpdateCanExecuteResponse } from 'wasm-lib/kcl/bindings/UpdateCanExecuteResponse'
|
||||
import { LspWorker } from './types'
|
||||
import { LspWorker } from 'editor/plugins/lsp/types'
|
||||
|
||||
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/
|
||||
|
||||
@ -54,7 +53,7 @@ interface LSPNotifyMap {
|
||||
}
|
||||
|
||||
export interface LanguageServerClientOptions {
|
||||
client: Client
|
||||
client: LspServerClient
|
||||
name: LspWorker
|
||||
}
|
||||
|
||||
@ -67,7 +66,7 @@ export interface LanguageServerOptions {
|
||||
}
|
||||
|
||||
export class LanguageServerClient {
|
||||
private client: Client
|
||||
private client: LspServerClient
|
||||
readonly name: string
|
||||
|
||||
public ready: boolean
|
||||
@ -77,7 +76,8 @@ export class LanguageServerClient {
|
||||
public initializePromise: Promise<void>
|
||||
|
||||
private isUpdatingSemanticTokens: boolean = false
|
||||
private semanticTokens: SemanticToken[] = []
|
||||
// tODO: Fix this type
|
||||
private semanticTokens: any = {}
|
||||
private queuedUids: string[] = []
|
||||
|
||||
constructor(options: LanguageServerClientOptions) {
|
||||
@ -93,8 +93,7 @@ export class LanguageServerClient {
|
||||
|
||||
async initialize() {
|
||||
// Start the client in the background.
|
||||
this.client.setNotifyFn(this.processNotifications.bind(this))
|
||||
this.client.start()
|
||||
this.client.startClient()
|
||||
|
||||
this.ready = true
|
||||
}
|
||||
@ -103,10 +102,6 @@ export class LanguageServerClient {
|
||||
return this.name
|
||||
}
|
||||
|
||||
getServerCapabilities(): LSP.ServerCapabilities<any> {
|
||||
return this.client.getServerCapabilities()
|
||||
}
|
||||
|
||||
close() {}
|
||||
|
||||
textDocumentDidOpen(params: LSP.DidOpenTextDocumentParams) {
|
||||
@ -117,13 +112,10 @@ export class LanguageServerClient {
|
||||
plugin.documentUri = params.textDocument.uri
|
||||
plugin.languageId = params.textDocument.languageId
|
||||
}
|
||||
|
||||
this.updateSemanticTokens(params.textDocument.uri)
|
||||
}
|
||||
|
||||
textDocumentDidChange(params: LSP.DidChangeTextDocumentParams) {
|
||||
this.notify('textDocument/didChange', params)
|
||||
this.updateSemanticTokens(params.textDocument.uri)
|
||||
}
|
||||
|
||||
textDocumentDidClose(params: LSP.DidCloseTextDocumentParams) {
|
||||
@ -160,64 +152,19 @@ export class LanguageServerClient {
|
||||
this.notify('workspace/didDeleteFiles', params)
|
||||
}
|
||||
|
||||
async updateSemanticTokens(uri: string) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.semanticTokensProvider) {
|
||||
return
|
||||
}
|
||||
|
||||
// Make sure we can only run, if we aren't already running.
|
||||
if (!this.isUpdatingSemanticTokens) {
|
||||
this.isUpdatingSemanticTokens = true
|
||||
|
||||
const result = await this.request('textDocument/semanticTokens/full', {
|
||||
textDocument: {
|
||||
uri,
|
||||
},
|
||||
})
|
||||
|
||||
this.semanticTokens = await deserializeTokens(
|
||||
result.data,
|
||||
this.getServerCapabilities().semanticTokensProvider
|
||||
)
|
||||
|
||||
this.isUpdatingSemanticTokens = false
|
||||
}
|
||||
}
|
||||
|
||||
getSemanticTokens(): SemanticToken[] {
|
||||
return this.semanticTokens
|
||||
}
|
||||
|
||||
async textDocumentHover(params: LSP.HoverParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.hoverProvider) {
|
||||
return
|
||||
}
|
||||
return await this.request('textDocument/hover', params)
|
||||
}
|
||||
|
||||
async textDocumentFormatting(params: LSP.DocumentFormattingParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.documentFormattingProvider) {
|
||||
return
|
||||
}
|
||||
return await this.request('textDocument/formatting', params)
|
||||
}
|
||||
|
||||
async textDocumentFoldingRange(params: LSP.FoldingRangeParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.foldingRangeProvider) {
|
||||
return
|
||||
}
|
||||
return await this.request('textDocument/foldingRange', params)
|
||||
}
|
||||
|
||||
async textDocumentCompletion(params: LSP.CompletionParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.completionProvider) {
|
||||
return
|
||||
}
|
||||
const response = await this.request('textDocument/completion', params)
|
||||
return response
|
||||
}
|
||||
@ -236,14 +183,19 @@ export class LanguageServerClient {
|
||||
method: K,
|
||||
params: LSPRequestMap[K][0]
|
||||
): Promise<LSPRequestMap[K][1]> {
|
||||
return this.client.request(method, params) as Promise<LSPRequestMap[K][1]>
|
||||
return this.client.client?.sendRequest(method, params) as Promise<
|
||||
LSPRequestMap[K][1]
|
||||
>
|
||||
}
|
||||
|
||||
private notify<K extends keyof LSPNotifyMap>(
|
||||
method: K,
|
||||
params: LSPNotifyMap[K]
|
||||
): void {
|
||||
return this.client.notify(method, params)
|
||||
): Promise<void> {
|
||||
if (!this.client.client) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
return this.client.client.sendNotification(method, params)
|
||||
}
|
||||
|
||||
async getCompletion(params: CopilotLspCompletionParams) {
|
||||
@ -253,6 +205,33 @@ export class LanguageServerClient {
|
||||
return response
|
||||
}
|
||||
|
||||
getServerCapabilities(): LSP.ServerCapabilities<any> | null {
|
||||
if (!this.client.client) {
|
||||
return null
|
||||
}
|
||||
|
||||
// TODO: Fix this type
|
||||
return null
|
||||
}
|
||||
|
||||
async updateSemanticTokens(uri: string) {
|
||||
// Make sure we can only run, if we aren't already running.
|
||||
if (!this.isUpdatingSemanticTokens) {
|
||||
this.isUpdatingSemanticTokens = true
|
||||
|
||||
this.semanticTokens = await this.request(
|
||||
'textDocument/semanticTokens/full',
|
||||
{
|
||||
textDocument: {
|
||||
uri,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
this.isUpdatingSemanticTokens = false
|
||||
}
|
||||
}
|
||||
|
||||
async accept(uuid: string) {
|
||||
const badUids = this.queuedUids.filter((u) => u !== uuid)
|
||||
this.queuedUids = []
|
||||
@ -286,6 +265,7 @@ export class LanguageServerClient {
|
||||
return await this.request('kcl/updateCanExecute', params)
|
||||
}
|
||||
|
||||
// TODO: Fix this type
|
||||
private processNotifications(notification: LSP.NotificationMessage) {
|
||||
for (const plugin of this.plugins) plugin.processNotification(notification)
|
||||
}
|
||||
|
@ -122,13 +122,13 @@ export function kclPlugin(options: LanguageServerOptions): Extension {
const line = state.doc.lineAt(pos)
let trigKind: CompletionTriggerKind = CompletionTriggerKind.Invoked
let trigChar: string | undefined
const serverCapabilities = plugin.client.getServerCapabilities()
if (
serverCapabilities &&
!explicit &&
plugin.client
.getServerCapabilities()
.completionProvider?.triggerCharacters?.includes(
line.text[pos - line.from - 1]
)
serverCapabilities.completionProvider?.triggerCharacters?.includes(
line.text[pos - line.from - 1]
)
) {
trigKind = CompletionTriggerKind.TriggerCharacter
trigChar = line.text[pos - line.from - 1]
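The hunk above hoists getServerCapabilities() into a local, since it can now return null, before checking trigger characters. The decision it feeds is roughly the following sketch, assuming CompletionTriggerKind from vscode-languageserver-protocol (the helper name and signature are illustrative, not from the source):

import { CompletionTriggerKind } from 'vscode-languageserver-protocol'

// Decide whether a completion request was typed via a trigger character
// (e.g. '.') or invoked explicitly / by ordinary typing.
function completionTrigger(
  lineText: string,
  column: number, // 0-based cursor position within the line
  explicit: boolean,
  triggerCharacters: string[] | undefined
): { kind: CompletionTriggerKind; char?: string } {
  const prevChar = lineText[column - 1]
  if (!explicit && prevChar !== undefined && triggerCharacters?.includes(prevChar)) {
    return { kind: CompletionTriggerKind.TriggerCharacter, char: prevChar }
  }
  return { kind: CompletionTriggerKind.Invoked }
}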
|
@ -1,168 +0,0 @@
|
||||
// Extends the codemirror Parser for kcl.
|
||||
|
||||
import {
|
||||
Parser,
|
||||
Input,
|
||||
TreeFragment,
|
||||
PartialParse,
|
||||
Tree,
|
||||
NodeType,
|
||||
NodeSet,
|
||||
} from '@lezer/common'
|
||||
import { LanguageServerClient } from 'editor/plugins/lsp'
|
||||
import { posToOffset } from 'editor/plugins/lsp/util'
|
||||
import { SemanticToken } from './semantic_tokens'
|
||||
import { DocInput } from '@codemirror/language'
|
||||
import { tags, styleTags } from '@lezer/highlight'
|
||||
|
||||
export default class KclParser extends Parser {
|
||||
private client: LanguageServerClient
|
||||
|
||||
constructor(client: LanguageServerClient) {
|
||||
super()
|
||||
this.client = client
|
||||
}
|
||||
|
||||
createParse(
|
||||
input: Input,
|
||||
fragments: readonly TreeFragment[],
|
||||
ranges: readonly { from: number; to: number }[]
|
||||
): PartialParse {
|
||||
let parse: PartialParse = new Context(this, input, fragments, ranges)
|
||||
return parse
|
||||
}
|
||||
|
||||
getTokenTypes(): string[] {
|
||||
return this.client.getServerCapabilities().semanticTokensProvider!.legend
|
||||
.tokenTypes
|
||||
}
|
||||
|
||||
getSemanticTokens(): SemanticToken[] {
|
||||
return this.client.getSemanticTokens()
|
||||
}
|
||||
}
|
||||
|
||||
class Context implements PartialParse {
|
||||
private parser: KclParser
|
||||
private input: DocInput
|
||||
private fragments: readonly TreeFragment[]
|
||||
private ranges: readonly { from: number; to: number }[]
|
||||
|
||||
private nodeTypes: { [key: string]: NodeType }
|
||||
stoppedAt: number = 0
|
||||
|
||||
private semanticTokens: SemanticToken[] = []
|
||||
private currentLine: number = 0
|
||||
private currentColumn: number = 0
|
||||
private nodeSet: NodeSet
|
||||
|
||||
constructor(
|
||||
/// The parser configuration used.
|
||||
parser: KclParser,
|
||||
input: Input,
|
||||
fragments: readonly TreeFragment[],
|
||||
ranges: readonly { from: number; to: number }[]
|
||||
) {
|
||||
this.parser = parser
|
||||
this.input = input as DocInput
|
||||
this.fragments = fragments
|
||||
this.ranges = ranges
|
||||
|
||||
// Iterate over the semantic token types and create a node type for each.
|
||||
this.nodeTypes = {}
|
||||
let nodeArray: NodeType[] = []
|
||||
this.parser.getTokenTypes().forEach((tokenType, index) => {
|
||||
const nodeType = NodeType.define({
|
||||
id: index,
|
||||
name: tokenType,
|
||||
// props: [this.styleTags],
|
||||
})
|
||||
this.nodeTypes[tokenType] = nodeType
|
||||
nodeArray.push(nodeType)
|
||||
})
|
||||
|
||||
this.semanticTokens = this.parser.getSemanticTokens()
|
||||
const styles = styleTags({
|
||||
number: tags.number,
|
||||
variable: tags.variableName,
|
||||
operator: tags.operator,
|
||||
keyword: tags.keyword,
|
||||
string: tags.string,
|
||||
comment: tags.comment,
|
||||
function: tags.function(tags.variableName),
|
||||
})
|
||||
this.nodeSet = new NodeSet(nodeArray).extend(styles)
|
||||
}
|
||||
|
||||
get parsedPos(): number {
|
||||
return 0
|
||||
}
|
||||
|
||||
advance(): Tree | null {
|
||||
if (this.semanticTokens.length === 0) {
|
||||
return new Tree(NodeType.none, [], [], 0)
|
||||
}
|
||||
const tree = this.createTree(this.semanticTokens[0], 0)
|
||||
this.stoppedAt = this.input.doc.length
|
||||
return tree
|
||||
}
|
||||
|
||||
createTree(token: SemanticToken, index: number): Tree {
|
||||
const changedLine = token.delta_line !== 0
|
||||
this.currentLine += token.delta_line
|
||||
if (changedLine) {
|
||||
this.currentColumn = 0
|
||||
}
|
||||
this.currentColumn += token.delta_start
|
||||
|
||||
// Let's get our position relative to the start of the file.
|
||||
let currentPosition = posToOffset(this.input.doc, {
|
||||
line: this.currentLine,
|
||||
character: this.currentColumn,
|
||||
})
|
||||
|
||||
const nodeType = this.nodeSet.types[this.nodeTypes[token.token_type].id]
|
||||
|
||||
if (currentPosition === undefined) {
|
||||
// This is bad and weird.
|
||||
return new Tree(nodeType, [], [], token.length)
|
||||
}
|
||||
|
||||
if (index >= this.semanticTokens.length - 1) {
|
||||
// We have no children.
|
||||
return new Tree(nodeType, [], [], token.length)
|
||||
}
|
||||
|
||||
const nextIndex = index + 1
|
||||
const nextToken = this.semanticTokens[nextIndex]
|
||||
const changedLineNext = nextToken.delta_line !== 0
|
||||
const nextLine = this.currentLine + nextToken.delta_line
|
||||
const nextColumn = changedLineNext
|
||||
? nextToken.delta_start
|
||||
: this.currentColumn + nextToken.delta_start
|
||||
const nextPosition = posToOffset(this.input.doc, {
|
||||
line: nextLine,
|
||||
character: nextColumn,
|
||||
})
|
||||
|
||||
if (nextPosition === undefined) {
|
||||
// This is bad and weird.
|
||||
return new Tree(nodeType, [], [], token.length)
|
||||
}
|
||||
|
||||
// Let's get the
|
||||
|
||||
return new Tree(
|
||||
nodeType,
|
||||
[this.createTree(nextToken, nextIndex)],
|
||||
|
||||
// The positions (offsets relative to the start of this tree) of the children.
|
||||
[nextPosition - currentPosition],
|
||||
token.length
|
||||
)
|
||||
}
|
||||
|
||||
stopAt(pos: number) {
|
||||
this.stoppedAt = pos
|
||||
}
|
||||
}
|
@ -1,51 +0,0 @@
|
||||
import type * as LSP from 'vscode-languageserver-protocol'
|
||||
|
||||
export class SemanticToken {
|
||||
delta_line: number
|
||||
delta_start: number
|
||||
length: number
|
||||
token_type: string
|
||||
token_modifiers_bitset: string
|
||||
|
||||
constructor(
|
||||
delta_line = 0,
|
||||
delta_start = 0,
|
||||
length = 0,
|
||||
token_type = '',
|
||||
token_modifiers_bitset = ''
|
||||
) {
|
||||
this.delta_line = delta_line
|
||||
this.delta_start = delta_start
|
||||
this.length = length
|
||||
this.token_type = token_type
|
||||
this.token_modifiers_bitset = token_modifiers_bitset
|
||||
}
|
||||
}
|
||||
|
||||
export async function deserializeTokens(
|
||||
data: number[],
|
||||
semanticTokensProvider?: LSP.SemanticTokensOptions
|
||||
): Promise<SemanticToken[]> {
|
||||
if (!semanticTokensProvider) {
|
||||
return []
|
||||
}
|
||||
// Check if data length is divisible by 5
|
||||
if (data.length % 5 !== 0) {
|
||||
return Promise.reject(new Error('Length is not divisible by 5'))
|
||||
}
|
||||
|
||||
const tokens = []
|
||||
for (let i = 0; i < data.length; i += 5) {
|
||||
tokens.push(
|
||||
new SemanticToken(
|
||||
data[i],
|
||||
data[i + 1],
|
||||
data[i + 2],
|
||||
semanticTokensProvider.legend.tokenTypes[data[i + 3]],
|
||||
semanticTokensProvider.legend.tokenModifiers[data[i + 4]]
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
return tokens
|
||||
}
|
@ -145,11 +145,7 @@ export class LanguageServerPlugin implements PluginValue {
view: EditorView,
{ line, character }: { line: number; character: number }
): Promise<Tooltip | null> {
if (
!this.client.ready ||
!this.client.getServerCapabilities().hoverProvider
)
return null
if (!this.client.ready) return null

this.sendChange({ documentText: view.state.doc.toString() })
const result = await this.client.textDocumentHover({
@ -175,11 +171,7 @@ export class LanguageServerPlugin implements PluginValue {
}

async getFoldingRanges(): Promise<LSP.FoldingRange[] | null> {
if (
!this.client.ready ||
!this.client.getServerCapabilities().foldingRangeProvider
)
return null
if (!this.client.ready) return null
const result = await this.client.textDocumentFoldingRange({
textDocument: { uri: this.documentUri },
})
@ -259,11 +251,7 @@ export class LanguageServerPlugin implements PluginValue {
}

async requestFormatting() {
if (
!this.client.ready ||
!this.client.getServerCapabilities().documentFormattingProvider
)
return null
if (!this.client.ready) return null

this.client.textDocumentDidChange({
textDocument: {
@ -309,11 +297,7 @@ export class LanguageServerPlugin implements PluginValue {
triggerCharacter: string | undefined
}
): Promise<CompletionResult | null> {
if (
!this.client.ready ||
!this.client.getServerCapabilities().completionProvider
)
return null
if (!this.client.ready) return null

this.sendChange({
documentText: context.state.doc.toString(),
|
@ -33,15 +33,19 @@ export default class Queue<T>
|
||||
}
|
||||
}
|
||||
|
||||
constructor() {
|
||||
constructor(stream?: WritableStream<T>) {
|
||||
const closed = this.#closed
|
||||
const promises = this.#promises
|
||||
const resolvers = this.#resolvers
|
||||
this.#stream = new WritableStream({
|
||||
write(item: T): void {
|
||||
Queue.#__enqueue(closed, promises, resolvers, item)
|
||||
},
|
||||
})
|
||||
if (stream) {
|
||||
this.#stream = stream
|
||||
} else {
|
||||
this.#stream = new WritableStream({
|
||||
write(item: T): void {
|
||||
Queue.#__enqueue(closed, promises, resolvers, item)
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#add(): void {
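The optional stream parameter added to the Queue constructor is what lets a subclass swap out the default buffering sink: FromServer in the worker file further down passes a WritableStream that forwards each item to a BrowserMessageWriter instead. A hedged usage sketch with an invented console sink:

// Sketch only: a Queue whose writes go to a caller-supplied sink rather than
// the default internal buffer.
const loggingQueue = new Queue<Uint8Array>(
  new WritableStream<Uint8Array>({
    write(chunk: Uint8Array): void {
      // Any side effect works here; this one just reports the chunk size.
      console.log(`received ${chunk.byteLength} bytes`)
    },
  })
)
// Items enqueued on loggingQueue now flow through the sink above instead of
// being buffered by the default write handler.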
|
@ -1,82 +0,0 @@
|
||||
import {
|
||||
Registration,
|
||||
ServerCapabilities,
|
||||
Unregistration,
|
||||
} from 'vscode-languageserver-protocol'
|
||||
|
||||
interface IFlexibleServerCapabilities extends ServerCapabilities {
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface IMethodServerCapabilityProviderDictionary {
|
||||
[key: string]: string
|
||||
}
|
||||
|
||||
const ServerCapabilitiesProviders: IMethodServerCapabilityProviderDictionary = {
|
||||
'textDocument/hover': 'hoverProvider',
|
||||
'textDocument/completion': 'completionProvider',
|
||||
'textDocument/signatureHelp': 'signatureHelpProvider',
|
||||
'textDocument/definition': 'definitionProvider',
|
||||
'textDocument/typeDefinition': 'typeDefinitionProvider',
|
||||
'textDocument/implementation': 'implementationProvider',
|
||||
'textDocument/references': 'referencesProvider',
|
||||
'textDocument/documentHighlight': 'documentHighlightProvider',
|
||||
'textDocument/documentSymbol': 'documentSymbolProvider',
|
||||
'textDocument/workspaceSymbol': 'workspaceSymbolProvider',
|
||||
'textDocument/codeAction': 'codeActionProvider',
|
||||
'textDocument/codeLens': 'codeLensProvider',
|
||||
'textDocument/documentFormatting': 'documentFormattingProvider',
|
||||
'textDocument/documentRangeFormatting': 'documentRangeFormattingProvider',
|
||||
'textDocument/documentOnTypeFormatting': 'documentOnTypeFormattingProvider',
|
||||
'textDocument/rename': 'renameProvider',
|
||||
'textDocument/documentLink': 'documentLinkProvider',
|
||||
'textDocument/color': 'colorProvider',
|
||||
'textDocument/foldingRange': 'foldingRangeProvider',
|
||||
'textDocument/declaration': 'declarationProvider',
|
||||
'textDocument/executeCommand': 'executeCommandProvider',
|
||||
'textDocument/semanticTokens/full': 'semanticTokensProvider',
|
||||
'textDocument/publishDiagnostics': 'diagnosticsProvider',
|
||||
}
|
||||
|
||||
function registerServerCapability(
|
||||
serverCapabilities: ServerCapabilities,
|
||||
registration: Registration
|
||||
): ServerCapabilities | Error {
|
||||
const serverCapabilitiesCopy = JSON.parse(
|
||||
JSON.stringify(serverCapabilities)
|
||||
) as IFlexibleServerCapabilities
|
||||
const { method, registerOptions } = registration
|
||||
const providerName = ServerCapabilitiesProviders[method]
|
||||
|
||||
if (providerName) {
|
||||
if (!registerOptions) {
|
||||
serverCapabilitiesCopy[providerName] = true
|
||||
} else {
|
||||
serverCapabilitiesCopy[providerName] = Object.assign(
|
||||
{},
|
||||
JSON.parse(JSON.stringify(registerOptions))
|
||||
)
|
||||
}
|
||||
} else {
|
||||
return new Error('Could not register server capability.')
|
||||
}
|
||||
|
||||
return serverCapabilitiesCopy
|
||||
}
|
||||
|
||||
function unregisterServerCapability(
|
||||
serverCapabilities: ServerCapabilities,
|
||||
unregistration: Unregistration
|
||||
): ServerCapabilities {
|
||||
const serverCapabilitiesCopy = JSON.parse(
|
||||
JSON.stringify(serverCapabilities)
|
||||
) as IFlexibleServerCapabilities
|
||||
const { method } = unregistration
|
||||
const providerName = ServerCapabilitiesProviders[method]
|
||||
|
||||
delete serverCapabilitiesCopy[providerName]
|
||||
|
||||
return serverCapabilitiesCopy
|
||||
}
|
||||
|
||||
export { registerServerCapability, unregisterServerCapability }
|
@ -1,21 +0,0 @@
|
||||
import { Message } from 'vscode-languageserver-protocol'
|
||||
|
||||
const env = import.meta.env.MODE
|
||||
|
||||
export default class Tracer {
|
||||
static client(message: string): void {
|
||||
// These are really noisy, so we have a special env var for them.
|
||||
if (env === 'lsp_tracing') {
|
||||
console.log('lsp client message', message)
|
||||
}
|
||||
}
|
||||
|
||||
static server(input: string | Message): void {
|
||||
// These are really noisy, so we have a special env var for them.
|
||||
if (env === 'lsp_tracing') {
|
||||
const message: string =
|
||||
typeof input === 'string' ? input : JSON.stringify(input)
|
||||
console.log('lsp server message', message)
|
||||
}
|
||||
}
|
||||
}
|
@ -4,22 +4,27 @@ export enum LspWorker {
|
||||
Kcl = 'kcl',
|
||||
Copilot = 'copilot',
|
||||
}
|
||||
export interface KclWorkerOptions {
|
||||
wasmUrl: string
|
||||
|
||||
interface LspWorkerOptions {
|
||||
token: string
|
||||
baseUnit: UnitLength
|
||||
apiBaseUrl: string
|
||||
callback: () => void
|
||||
wasmUrl: string
|
||||
}
|
||||
|
||||
export interface CopilotWorkerOptions {
|
||||
wasmUrl: string
|
||||
token: string
|
||||
apiBaseUrl: string
|
||||
export interface KclWorkerOptions extends LspWorkerOptions {
|
||||
baseUnit: UnitLength
|
||||
}
|
||||
|
||||
export interface CopilotWorkerOptions extends LspWorkerOptions {}
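With the shared fields hoisted into LspWorkerOptions, both option objects are built the same way and the KCL variant only adds baseUnit. A sketch under that assumption — the field set is read off the diff above and every value below is a placeholder:

// Placeholder values; assumes LspWorkerOptions = { token, apiBaseUrl, callback, wasmUrl }.
const kclOptions: KclWorkerOptions = {
  wasmUrl: '/wasm_lib_bg.wasm',
  token: '<api token>',
  apiBaseUrl: 'https://api.dev.zoo.dev',
  callback: () => console.log('kcl lsp ready'),
  baseUnit: 'mm' as UnitLength, // assumed to be a valid UnitLength
}

const copilotOptions: CopilotWorkerOptions = {
  wasmUrl: '/wasm_lib_bg.wasm',
  token: '<api token>',
  apiBaseUrl: 'https://api.dev.zoo.dev',
  callback: () => console.log('copilot lsp ready'),
}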
|
||||
|
||||
export interface LspContext {
|
||||
worker: LspWorker
|
||||
options: KclWorkerOptions | CopilotWorkerOptions
|
||||
}
|
||||
|
||||
export enum LspWorkerEventType {
|
||||
Init = 'init',
|
||||
Call = 'call',
|
||||
}
|
||||
|
||||
export interface LspWorkerEvent {
|
||||
|
@ -1,23 +1,77 @@
|
||||
import { Codec, FromServer, IntoServer } from 'editor/plugins/lsp/codec'
|
||||
import { fileSystemManager } from 'lang/std/fileSystemManager'
|
||||
import init, {
|
||||
ServerConfig,
|
||||
copilot_lsp_run,
|
||||
kcl_lsp_run,
|
||||
} from 'wasm-lib/pkg/wasm_lib'
|
||||
import * as jsrpc from 'json-rpc-2.0'
|
||||
import {
|
||||
LspWorkerEventType,
|
||||
LspWorkerEvent,
|
||||
LspWorker,
|
||||
KclWorkerOptions,
|
||||
CopilotWorkerOptions,
|
||||
} from 'editor/plugins/lsp/types'
|
||||
import { EngineCommandManager } from 'lang/std/engineConnection'
|
||||
import { err } from 'lib/trap'
|
||||
import { Message } from 'vscode-languageserver'
|
||||
import { LspWorkerEvent, LspWorkerEventType } from 'editor/plugins/lsp/types'
|
||||
import Queue from 'editor/plugins/lsp/queue'
|
||||
import {
|
||||
BrowserMessageReader,
|
||||
BrowserMessageWriter,
|
||||
} from 'vscode-languageserver-protocol/browser'
|
||||
|
||||
const intoServer: IntoServer = new IntoServer()
|
||||
const fromServer: FromServer | Error = FromServer.create()
|
||||
class Headers {
|
||||
static add(message: string): string {
|
||||
return `Content-Length: ${message.length}\r\n\r\n${message}`
|
||||
}
|
||||
|
||||
static remove(delimited: string): string {
|
||||
return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
|
||||
}
|
||||
}
|
||||
|
||||
export const encoder = new TextEncoder()
|
||||
export const decoder = new TextDecoder()
|
||||
|
||||
class Codec {
|
||||
static encode(message: Message): Uint8Array {
|
||||
const rpc = JSON.stringify(message.jsonrpc)
|
||||
const delimited = Headers.add(rpc)
|
||||
return encoder.encode(delimited)
|
||||
}
|
||||
|
||||
static decode<T>(data: Uint8Array): T {
|
||||
const delimited = decoder.decode(data)
|
||||
const message = Headers.remove(delimited)
|
||||
return JSON.parse(message) as T
|
||||
}
|
||||
}
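Headers and Codec together implement the Content-Length framing the LSP transport expects: add() prefixes the serialized payload with its length, remove() strips that prefix back off. A rough round trip through just the Headers helper (the message literal is invented for illustration):

// Sketch of the Content-Length framing used between this worker and the wasm
// LSP server.
const payload = JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'initialize' })
const framed = Headers.add(payload)
// framed now starts with 'Content-Length: <n>' followed by a blank line and the payload.
const unframed = Headers.remove(framed)
console.log(JSON.parse(unframed).method) // 'initialize'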
|
||||
|
||||
class IntoServer extends Queue<Uint8Array> {
|
||||
constructor(reader: BrowserMessageReader) {
|
||||
super()
|
||||
reader.listen((message: Message) => {
|
||||
super.enqueue(Codec.encode(message))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
class FromServer extends Queue<Uint8Array> {
|
||||
constructor(writer: BrowserMessageWriter) {
|
||||
super(
|
||||
new WritableStream({
|
||||
write(item: Uint8Array): void {
|
||||
writer.write(Codec.decode(item))
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const browserReader = new BrowserMessageReader(self)
|
||||
const browserWriter = new BrowserMessageWriter(self)
|
||||
|
||||
const intoServer = new IntoServer(browserReader)
|
||||
const fromServer = new FromServer(browserWriter)
|
||||
|
||||
// Initialise the wasm module.
|
||||
const initialise = async (wasmUrl: string) => {
|
||||
@ -57,7 +111,7 @@ export async function kclLspRun(
|
||||
}
|
||||
|
||||
onmessage = function (event) {
|
||||
if (err(fromServer)) return
|
||||
if (err(intoServer)) return
|
||||
const { worker, eventType, eventData }: LspWorkerEvent = event.data
|
||||
|
||||
switch (eventType) {
|
||||
@ -95,35 +149,7 @@ onmessage = function (event) {
|
||||
console.error('Worker: Error loading wasm module', worker, error)
|
||||
})
|
||||
break
|
||||
case LspWorkerEventType.Call:
|
||||
const data = eventData as Uint8Array
|
||||
intoServer.enqueue(data)
|
||||
const json: jsrpc.JSONRPCRequest = Codec.decode(data)
|
||||
if (null != json.id) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
fromServer.responses.get(json.id)!.then((response) => {
|
||||
const encoded = Codec.encode(response as jsrpc.JSONRPCResponse)
|
||||
postMessage(encoded)
|
||||
})
|
||||
}
|
||||
break
|
||||
default:
|
||||
console.error('Worker: Unknown message type', worker, eventType)
|
||||
}
|
||||
}
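From the main thread's side, the Call branch above is driven by posting an LspWorkerEvent whose eventData is the already-encoded message bytes. A hedged sketch of that call site — lspWorker and encodedBytes are hypothetical handles, and the object shape simply mirrors the destructuring in the onmessage handler above:

// Hypothetical handles — the real ones live in the LSP client setup.
declare const lspWorker: Worker
declare const encodedBytes: Uint8Array

// Shape matches `const { worker, eventType, eventData } = event.data` above.
lspWorker.postMessage({
  worker: LspWorker.Kcl,
  eventType: LspWorkerEventType.Call,
  eventData: encodedBytes,
})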
|
||||
|
||||
new Promise<void>(async (resolve) => {
|
||||
if (err(fromServer)) return
|
||||
for await (const requests of fromServer.requests) {
|
||||
const encoded = Codec.encode(requests as jsrpc.JSONRPCRequest)
|
||||
postMessage(encoded)
|
||||
}
|
||||
})
|
||||
|
||||
new Promise<void>(async (resolve) => {
|
||||
if (err(fromServer)) return
|
||||
for await (const notification of fromServer.notifications) {
|
||||
const encoded = Codec.encode(notification as jsrpc.JSONRPCRequest)
|
||||
postMessage(encoded)
|
||||
}
|
||||
})
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { executeAst } from 'useStore'
|
||||
import { executeAst, lintAst } from 'useStore'
|
||||
import { Selections } from 'lib/selections'
|
||||
import { KCLError, kclErrorsToDiagnostics } from './errors'
|
||||
import { uuidv4 } from 'lib/utils'
|
||||
@ -211,6 +211,9 @@ export class KclManager {
|
||||
ast,
|
||||
engineCommandManager: this.engineCommandManager,
|
||||
})
|
||||
|
||||
editorManager.addDiagnostics(await lintAst({ ast: ast }))
|
||||
|
||||
sceneInfra.modelingSend({ type: 'code edit during sketch' })
|
||||
defaultSelectionFilter(programMemory, this.engineCommandManager)
|
||||
|
||||
@ -261,7 +264,10 @@ export class KclManager {
|
||||
return
|
||||
}
|
||||
const newAst = this.safeParse(newCode)
|
||||
if (!newAst) return
|
||||
if (!newAst) {
|
||||
this.clearAst()
|
||||
return
|
||||
}
|
||||
codeManager.updateCodeEditor(newCode)
|
||||
// Write the file to disk.
|
||||
await codeManager.writeToFile()
|
||||
@ -278,6 +284,9 @@ export class KclManager {
|
||||
engineCommandManager: this.engineCommandManager,
|
||||
useFakeExecutor: true,
|
||||
})
|
||||
|
||||
editorManager.addDiagnostics(await lintAst({ ast: ast }))
|
||||
|
||||
this._logs = logs
|
||||
this._kclErrors = errors
|
||||
this._programMemory = programMemory
|
||||
|
@ -502,11 +502,10 @@ describe('testing pipe operator special', () => {
|
||||
},
|
||||
{ type: 'PipeSubstitution', start: 82, end: 83 },
|
||||
{
|
||||
type: 'Literal',
|
||||
type: 'TagDeclarator',
|
||||
start: 85,
|
||||
end: 93,
|
||||
value: 'myPath',
|
||||
raw: '"myPath"',
|
||||
},
|
||||
],
|
||||
optional: false,
|
||||
@ -1657,11 +1656,10 @@ describe('should recognise callExpresions in binaryExpressions', () => {
|
||||
callee: { type: 'Identifier', start: 8, end: 15, name: 'segEndX' },
|
||||
arguments: [
|
||||
{
|
||||
type: 'Literal',
|
||||
type: 'Identifier',
|
||||
start: 16,
|
||||
end: 23,
|
||||
value: 'seg02',
|
||||
raw: "'seg02'",
|
||||
name: 'seg02',
|
||||
},
|
||||
{ type: 'PipeSubstitution', start: 25, end: 26 },
|
||||
],
|
||||
|
@ -128,7 +128,7 @@ const mySketch001 = startSketchOn('XY')
|
||||
const sk1 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> lineTo([-2.5, 0], %)
|
||||
|> lineTo([0, 10], %, "p")
|
||||
|> lineTo([0, 10], %, $p)
|
||||
|> lineTo([2.5, 0], %)
|
||||
// |> rx(45, %)
|
||||
// |> translate([1,0,1], %)
|
||||
@ -138,7 +138,7 @@ const theExtrude = extrude(2, sk1)
|
||||
const sk2 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> lineTo([-2.5, 0], %)
|
||||
|> lineTo([0, 3], %, "p")
|
||||
|> lineTo([0, 3], %, $o)
|
||||
|> lineTo([2.5, 0], %)
|
||||
// |> transform(theTransf, %)
|
||||
|> extrude(2, %)
|
||||
@ -163,20 +163,20 @@ const sk2 = startSketchOn('XY')
|
||||
type: 'extrudePlane',
|
||||
faceId: expect.any(String),
|
||||
tag: {
|
||||
end: 117,
|
||||
end: 116,
|
||||
start: 114,
|
||||
type: 'TagDeclarator',
|
||||
value: 'p',
|
||||
},
|
||||
id: expect.any(String),
|
||||
sourceRange: [95, 118],
|
||||
sourceRange: [95, 117],
|
||||
},
|
||||
{
|
||||
type: 'extrudePlane',
|
||||
faceId: expect.any(String),
|
||||
tag: null,
|
||||
id: expect.any(String),
|
||||
sourceRange: [124, 143],
|
||||
sourceRange: [123, 142],
|
||||
},
|
||||
],
|
||||
sketchGroup: {
|
||||
@ -201,14 +201,14 @@ const sk2 = startSketchOn('XY')
|
||||
from: [-2.5, 0],
|
||||
to: [0, 10],
|
||||
tag: {
|
||||
end: 117,
|
||||
end: 116,
|
||||
start: 114,
|
||||
type: 'TagDeclarator',
|
||||
value: 'p',
|
||||
},
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
sourceRange: [95, 118],
|
||||
sourceRange: [95, 117],
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -218,7 +218,7 @@ const sk2 = startSketchOn('XY')
|
||||
tag: null,
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
sourceRange: [124, 143],
|
||||
sourceRange: [123, 142],
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -237,26 +237,26 @@ const sk2 = startSketchOn('XY')
|
||||
faceId: expect.any(String),
|
||||
tag: null,
|
||||
id: expect.any(String),
|
||||
sourceRange: [374, 394],
|
||||
sourceRange: [373, 393],
|
||||
},
|
||||
{
|
||||
type: 'extrudePlane',
|
||||
faceId: expect.any(String),
|
||||
tag: {
|
||||
end: 421,
|
||||
start: 418,
|
||||
end: 419,
|
||||
start: 417,
|
||||
type: 'TagDeclarator',
|
||||
value: 'p',
|
||||
value: 'o',
|
||||
},
|
||||
id: expect.any(String),
|
||||
sourceRange: [400, 422],
|
||||
sourceRange: [399, 420],
|
||||
},
|
||||
{
|
||||
type: 'extrudePlane',
|
||||
faceId: expect.any(String),
|
||||
tag: null,
|
||||
id: expect.any(String),
|
||||
sourceRange: [428, 447],
|
||||
sourceRange: [426, 445],
|
||||
},
|
||||
],
|
||||
sketchGroup: {
|
||||
@ -273,7 +273,7 @@ const sk2 = startSketchOn('XY')
|
||||
tag: null,
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
sourceRange: [374, 394],
|
||||
sourceRange: [373, 393],
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -281,14 +281,14 @@ const sk2 = startSketchOn('XY')
|
||||
from: [-2.5, 0],
|
||||
to: [0, 3],
|
||||
tag: {
|
||||
end: 421,
|
||||
start: 418,
|
||||
end: 419,
|
||||
start: 417,
|
||||
type: 'TagDeclarator',
|
||||
value: 'p',
|
||||
value: 'o',
|
||||
},
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
sourceRange: [400, 422],
|
||||
sourceRange: [399, 420],
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -298,7 +298,7 @@ const sk2 = startSketchOn('XY')
|
||||
tag: null,
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
sourceRange: [428, 447],
|
||||
sourceRange: [426, 445],
|
||||
},
|
||||
},
|
||||
],
|
||||
@ -306,7 +306,7 @@ const sk2 = startSketchOn('XY')
|
||||
height: 2,
|
||||
startCapId: expect.any(String),
|
||||
endCapId: expect.any(String),
|
||||
__meta: [{ sourceRange: [343, 368] }],
|
||||
__meta: [{ sourceRange: [342, 367] }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
@ -163,25 +163,25 @@ describe('Testing giveSketchFnCallTag', () => {
|
||||
code,
|
||||
'line([0, 0.83], %)'
|
||||
)
|
||||
expect(newCode).toContain("line([0, 0.83], %, 'seg01')")
|
||||
expect(newCode).toContain('line([0, 0.83], %, $seg01)')
|
||||
expect(tag).toBe('seg01')
|
||||
expect(isTagExisting).toBe(false)
|
||||
})
|
||||
it('Should create a unique tag if seg01 already exists', () => {
|
||||
let _code = code.replace(
|
||||
'line([-2.57, -0.13], %)',
|
||||
"line([-2.57, -0.13], %, 'seg01')"
|
||||
'line([-2.57, -0.13], %, $seg01)'
|
||||
)
|
||||
const { newCode, tag, isTagExisting } = giveSketchFnCallTagTestHelper(
|
||||
_code,
|
||||
'line([0, 0.83], %)'
|
||||
)
|
||||
expect(newCode).toContain("line([0, 0.83], %, 'seg02')")
|
||||
expect(newCode).toContain('line([0, 0.83], %, $seg02)')
|
||||
expect(tag).toBe('seg02')
|
||||
expect(isTagExisting).toBe(false)
|
||||
})
|
||||
it('Should return existing tag if it already exists', () => {
|
||||
const lineButWithTag = "line([-2.57, -0.13], %, 'butts')"
|
||||
const lineButWithTag = 'line([-2.57, -0.13], %, $butts)'
|
||||
let _code = code.replace('line([-2.57, -0.13], %)', lineButWithTag)
|
||||
const { newCode, tag, isTagExisting } = giveSketchFnCallTagTestHelper(
|
||||
_code,
|
||||
@ -328,11 +328,11 @@ describe('testing sketchOnExtrudedFace', () => {
|
||||
const newCode = recast(modifiedAst)
|
||||
expect(newCode).toContain(`const part001 = startSketchOn('-XZ')
|
||||
|> startProfileAt([3.58, 2.06], %)
|
||||
|> line([9.7, 9.19], %, 'seg01')
|
||||
|> line([9.7, 9.19], %, $seg01)
|
||||
|> line([8.62, -9.57], %)
|
||||
|> close(%)
|
||||
|> extrude(5 + 7, %)
|
||||
const sketch001 = startSketchOn(part001, 'seg01')`)
|
||||
const sketch001 = startSketchOn(part001, seg01)`)
|
||||
})
|
||||
test('it should be able to extrude on close segments', async () => {
|
||||
const code = `const part001 = startSketchOn('-XZ')
|
||||
@ -371,9 +371,9 @@ const sketch001 = startSketchOn(part001, 'seg01')`)
|
||||
|> startProfileAt([3.58, 2.06], %)
|
||||
|> line([9.7, 9.19], %)
|
||||
|> line([8.62, -9.57], %)
|
||||
|> close(%, 'seg01')
|
||||
|> close(%, $seg01)
|
||||
|> extrude(5 + 7, %)
|
||||
const sketch001 = startSketchOn(part001, 'seg01')`)
|
||||
const sketch001 = startSketchOn(part001, seg01)`)
|
||||
})
|
||||
test('it should be able to extrude on start-end caps', async () => {
|
||||
const code = `const part001 = startSketchOn('-XZ')
|
||||
@ -457,7 +457,7 @@ const sketch001 = startSketchOn(part001, 'END')`)
|
||||
if (err(updatedAst)) throw updatedAst
|
||||
const newCode = recast(updatedAst.modifiedAst)
|
||||
expect(newCode).toContain(`const part001 = extrude(5 + 7, sketch001)
|
||||
const sketch002 = startSketchOn(part001, 'seg01')`)
|
||||
const sketch002 = startSketchOn(part001, seg01)`)
|
||||
})
|
||||
})
|
||||
|
||||
@ -499,49 +499,49 @@ describe('Testing deleteSegmentFromPipeExpression', () => {
|
||||
replace2 = ''
|
||||
) => `const part001 = startSketchOn('-XZ')
|
||||
|> startProfileAt([54.78, -95.91], %)
|
||||
|> line([306.21, 198.82], %, 'b')
|
||||
|> line([306.21, 198.82], %, $b)
|
||||
${!replace1 ? ` |> ${line}\n` : ''} |> angledLine([-65, ${
|
||||
!replace1 ? "segLen('a', %)" : replace1
|
||||
!replace1 ? 'segLen(a, %)' : replace1
|
||||
}], %)
|
||||
|> line([306.21, 198.87], %)
|
||||
|> angledLine([65, ${!replace2 ? "segAng('a', %)" : replace2}], %)
|
||||
|> angledLine([65, ${!replace2 ? 'segAng(a, %)' : replace2}], %)
|
||||
|> line([-963.39, -154.67], %)
|
||||
`
|
||||
test.each([
|
||||
['line', "line([306.21, 198.85], %, 'a')", ['365.11', '33']],
|
||||
['lineTo', "lineTo([306.21, 198.85], %, 'a')", ['110.48', '119.73']],
|
||||
['yLine', "yLine(198.85, %, 'a')", ['198.85', '90']],
|
||||
['xLine', "xLine(198.85, %, 'a')", ['198.85', '0']],
|
||||
['yLineTo', "yLineTo(198.85, %, 'a')", ['95.94', '90']],
|
||||
['xLineTo', "xLineTo(198.85, %, 'a')", ['162.14', '180']],
|
||||
['line', 'line([306.21, 198.85], %, $a)', ['365.11', '33']],
|
||||
['lineTo', 'lineTo([306.21, 198.85], %, $a)', ['110.48', '119.73']],
|
||||
['yLine', 'yLine(198.85, %, $a)', ['198.85', '90']],
|
||||
['xLine', 'xLine(198.85, %, $a)', ['198.85', '0']],
|
||||
['yLineTo', 'yLineTo(198.85, %, $a)', ['95.94', '90']],
|
||||
['xLineTo', 'xLineTo(198.85, %, $a)', ['162.14', '180']],
|
||||
[
|
||||
'angledLine',
|
||||
"angledLine({ angle: 45.5, length: 198.85 }, %, 'a')",
|
||||
'angledLine({ angle: 45.5, length: 198.85 }, %, $a)',
|
||||
['198.85', '45.5'],
|
||||
],
|
||||
[
|
||||
'angledLineOfXLength',
|
||||
"angledLineOfXLength({ angle: 45.5, length: 198.85 }, %, 'a')",
|
||||
'angledLineOfXLength({ angle: 45.5, length: 198.85 }, %, $a)',
|
||||
['283.7', '45.5'],
|
||||
],
|
||||
[
|
||||
'angledLineOfYLength',
|
||||
"angledLineOfYLength({ angle: 45.5, length: 198.85 }, %, 'a')",
|
||||
'angledLineOfYLength({ angle: 45.5, length: 198.85 }, %, $a)',
|
||||
['278.79', '45.5'],
|
||||
],
|
||||
[
|
||||
'angledLineToX',
|
||||
"angledLineToX({ angle: 45.5, to: 198.85 }, %, 'a')",
|
||||
'angledLineToX({ angle: 45.5, to: 198.85 }, %, $a)',
|
||||
['231.33', '134.5'],
|
||||
],
|
||||
[
|
||||
'angledLineToY',
|
||||
"angledLineToY({ angle: 45.5, to: 198.85 }, %, 'a')",
|
||||
'angledLineToY({ angle: 45.5, to: 198.85 }, %, $a)',
|
||||
['134.51', '45.5'],
|
||||
],
|
||||
[
|
||||
'angledLineThatIntersects',
|
||||
`angledLineThatIntersects({ angle: 45.5, intersectTag: 'b', offset: 198.85 }, %, 'a')`,
|
||||
`angledLineThatIntersects({ angle: 45.5, intersectTag: b, offset: 198.85 }, %, $a)`,
|
||||
['918.4', '45.5'],
|
||||
],
|
||||
])(`%s`, async (_, line, [replace1, replace2]) => {
|
||||
@ -579,7 +579,7 @@ describe('Testing removeSingleConstraintInfo', () => {
|
||||
|> lineTo([6.14 + 0, 3.14 + 0], %)
|
||||
|> xLineTo(8 + 0, %)
|
||||
|> yLineTo(5 + 0, %)
|
||||
|> yLine(3.14 + 0, %, 'a')
|
||||
|> yLine(3.14 + 0, %, $a)
|
||||
|> xLine(3.14 + 0, %)
|
||||
|> angledLineOfXLength({ angle: 3 + 0, length: 3.14 + 0 }, %)
|
||||
|> angledLineOfYLength({ angle: 30 + 0, length: 3 + 0 }, %)
|
||||
@ -587,7 +587,7 @@ describe('Testing removeSingleConstraintInfo', () => {
|
||||
|> angledLineToY({ angle: 30 + 0, to: 10.14 + 0 }, %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 3.14 + 0,
|
||||
intersectTag: 'a',
|
||||
intersectTag: a,
|
||||
offset: 0 + 0
|
||||
}, %)
|
||||
|> tangentialArcTo([3.14 + 0, 13.14 + 0], %)`
|
||||
@ -601,7 +601,7 @@ describe('Testing removeSingleConstraintInfo', () => {
|
||||
['lineTo([6.14, 3.14 + 0], %)', 'arrayIndex', 0],
|
||||
['xLineTo(8, %)', '', ''],
|
||||
['yLineTo(5, %)', '', ''],
|
||||
["yLine(3.14, %, 'a')", '', ''],
|
||||
['yLine(3.14, %, $a)', '', ''],
|
||||
['xLine(3.14, %)', '', ''],
|
||||
[
|
||||
'angledLineOfXLength({ angle: 3, length: 3.14 + 0 }, %)',
|
||||
@ -627,7 +627,7 @@ describe('Testing removeSingleConstraintInfo', () => {
|
||||
`angledLineThatIntersects({
|
||||
angle: 3.14 + 0,
|
||||
offset: 0,
|
||||
intersectTag: 'a'
|
||||
intersectTag: a
|
||||
}, %)`,
|
||||
'objectProperty',
|
||||
'offset',
|
||||
|
@ -37,6 +37,7 @@ import { DefaultPlaneStr } from 'clientSideScene/sceneEntities'
|
||||
import { isOverlap, roundOff } from 'lib/utils'
|
||||
import { KCL_DEFAULT_CONSTANT_PREFIXES } from 'lib/constants'
|
||||
import { ConstrainInfo } from './std/stdTypes'
|
||||
import { TagDeclarator } from 'wasm-lib/kcl/bindings/TagDeclarator'
|
||||
|
||||
export function startSketchOnDefault(
|
||||
node: Program,
|
||||
@ -379,7 +380,7 @@ export function sketchOnExtrudedFace(
|
||||
const { node: extrudeVarDec } = _node3
|
||||
const extrudeName = extrudeVarDec.id?.name
|
||||
|
||||
let _tag = ''
|
||||
let _tag = null
|
||||
if (cap === 'none') {
|
||||
const __tag = addTagForSketchOnFace(
|
||||
{
|
||||
@ -391,17 +392,17 @@ export function sketchOnExtrudedFace(
|
||||
)
|
||||
if (err(__tag)) return __tag
|
||||
const { modifiedAst, tag } = __tag
|
||||
_tag = tag
|
||||
_tag = createIdentifier(tag)
|
||||
_node = modifiedAst
|
||||
} else {
|
||||
_tag = cap.toUpperCase()
|
||||
_tag = createLiteral(cap.toUpperCase())
|
||||
}
|
||||
|
||||
const newSketch = createVariableDeclaration(
|
||||
newSketchName,
|
||||
createCallExpressionStdLib('startSketchOn', [
|
||||
createIdentifier(extrudeName ? extrudeName : oldSketchName),
|
||||
createLiteral(_tag),
|
||||
_tag,
|
||||
]),
|
||||
'const'
|
||||
)
|
||||
@ -483,6 +484,15 @@ export function createLiteral(value: string | number): Literal {
|
||||
}
|
||||
}
|
||||
|
||||
export function createTagDeclarator(value: string): TagDeclarator {
|
||||
return {
|
||||
type: 'TagDeclarator',
|
||||
start: 0,
|
||||
end: 0,
|
||||
value,
|
||||
}
|
||||
}
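createTagDeclarator follows the same pattern as the other create* helpers: a bare node with zeroed start/end that gets real source ranges on recast. For orientation, the three node kinds that now appear in tag positions (a sketch, not real parser output):

// Tag declaration site, e.g. line([0, 0.83], %, $seg01).
const declared = createTagDeclarator('seg01')
// -> { type: 'TagDeclarator', start: 0, end: 0, value: 'seg01' }

// Tag use site, e.g. segLen(seg01, %) — an ordinary identifier node.
const used = createIdentifier('seg01')

// Cap names stay string literals, e.g. startSketchOn(part001, 'END').
const cap = createLiteral('END')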
|
||||
|
||||
export function createIdentifier(name: string): Identifier {
|
||||
return {
|
||||
type: 'Identifier',
|
||||
@ -657,17 +667,18 @@ export function giveSketchFnCallTag(
|
||||
// Tag is always the 3rd expression now; using the arg index feels brittle,
|
||||
// but we can come up with a better way to identify the tag later.
|
||||
const thirdArg = primaryCallExp.arguments?.[2]
|
||||
const tagLiteral =
|
||||
thirdArg || (createLiteral(tag || findUniqueName(ast, 'seg', 2)) as Literal)
|
||||
const tagDeclarator =
|
||||
thirdArg ||
|
||||
(createTagDeclarator(tag || findUniqueName(ast, 'seg', 2)) as TagDeclarator)
|
||||
const isTagExisting = !!thirdArg
|
||||
if (!isTagExisting) {
|
||||
primaryCallExp.arguments[2] = tagLiteral
|
||||
primaryCallExp.arguments[2] = tagDeclarator
|
||||
}
|
||||
if ('value' in tagLiteral) {
|
||||
// Now TypeScript knows tagLiteral has a value property
|
||||
if ('value' in tagDeclarator) {
|
||||
// Now TypeScript knows tagDeclarator has a value property
|
||||
return {
|
||||
modifiedAst: ast,
|
||||
tag: String(tagLiteral.value),
|
||||
tag: String(tagDeclarator.value),
|
||||
isTagExisting,
|
||||
pathToNode: path,
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { parse, recast, initPromise } from './wasm'
|
||||
import { parse, recast, initPromise, PathToNode } from './wasm'
|
||||
import {
|
||||
findAllPreviousVariables,
|
||||
isNodeSafeToReplace,
|
||||
@ -9,6 +9,7 @@ import {
|
||||
findUsesOfTagInPipe,
|
||||
hasSketchPipeBeenExtruded,
|
||||
hasExtrudableGeometry,
|
||||
traverse,
|
||||
} from './queryAst'
|
||||
import { enginelessExecutor } from '../lib/testHelpers'
|
||||
import {
|
||||
@ -411,15 +412,15 @@ describe('Testing findUsesOfTagInPipe', () => {
|
||||
const exampleCode = `const part001 = startSketchOn('-XZ')
|
||||
|> startProfileAt([68.12, 156.65], %)
|
||||
|> line([306.21, 198.82], %)
|
||||
|> line([306.21, 198.85], %, 'seg01')
|
||||
|> angledLine([-65, segLen('seg01', %)], %)
|
||||
|> line([306.21, 198.85], %, $seg01)
|
||||
|> angledLine([-65, segLen(seg01, %)], %)
|
||||
|> line([306.21, 198.87], %)
|
||||
|> angledLine([65, segLen('seg01', %)], %)`
|
||||
|> angledLine([65, segLen(seg01, %)], %)`
|
||||
it('finds the current segment', async () => {
|
||||
const ast = parse(exampleCode)
|
||||
if (err(ast)) throw ast
|
||||
|
||||
const lineOfInterest = `198.85], %, 'seg01'`
|
||||
const lineOfInterest = `198.85], %, $seg01`
|
||||
const characterIndex =
|
||||
exampleCode.indexOf(lineOfInterest) + lineOfInterest.length
|
||||
const pathToNode = getNodePathFromSourceRange(ast, [
|
||||
@ -454,7 +455,7 @@ describe('Testing hasSketchPipeBeenExtruded', () => {
|
||||
|> line([2.48, 2.44], %)
|
||||
|> line([2.66, 1.17], %)
|
||||
|> line([3.75, 0.46], %)
|
||||
|> line([4.99, -0.46], %, 'seg01')
|
||||
|> line([4.99, -0.46], %, $seg01)
|
||||
|> line([3.3, -2.12], %)
|
||||
|> line([2.16, -3.33], %)
|
||||
|> line([0.85, -3.08], %)
|
||||
@ -463,7 +464,7 @@ describe('Testing hasSketchPipeBeenExtruded', () => {
|
||||
|> line([-17.67, 0.85], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(10, sketch001)
|
||||
const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
const sketch002 = startSketchOn(extrude001, $seg01)
|
||||
|> startProfileAt([-12.94, 6.6], %)
|
||||
|> line([2.45, -0.2], %)
|
||||
|> line([-2, -1.25], %)
|
||||
@ -473,7 +474,7 @@ const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
it('finds sketch001 pipe to be extruded', async () => {
|
||||
const ast = parse(exampleCode)
|
||||
if (err(ast)) throw ast
|
||||
const lineOfInterest = `line([4.99, -0.46], %, 'seg01')`
|
||||
const lineOfInterest = `line([4.99, -0.46], %, $seg01)`
|
||||
const characterIndex =
|
||||
exampleCode.indexOf(lineOfInterest) + lineOfInterest.length
|
||||
const extruded = hasSketchPipeBeenExtruded(
|
||||
@ -511,7 +512,7 @@ describe('Testing hasExtrudableGeometry', () => {
|
||||
|> line([-17.67, 0.85], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(10, sketch001)
|
||||
const sketch002 = startSketchOn(extrude001, 'seg01')
|
||||
const sketch002 = startSketchOn(extrude001, $seg01)
|
||||
|> startProfileAt([-12.94, 6.6], %)
|
||||
|> line([2.45, -0.2], %)
|
||||
|> line([-2, -1.25], %)
|
||||
@ -538,3 +539,53 @@ const extrude001 = extrude(10, sketch001)
|
||||
expect(extrudable).toBeFalsy()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Testing traverse and pathToNode', () => {
|
||||
it.each([
|
||||
['basic', '2.73'],
|
||||
[
|
||||
'very nested, array, object, callExpression, array, memberExpression',
|
||||
'.yo',
|
||||
],
|
||||
])('testing %s', async (testName, literalOfInterest) => {
|
||||
const code = `const myVar = 5
|
||||
const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([3.29, 7.86], %)
|
||||
|> line([2.48, 2.44], %)
|
||||
|> line([-3.86, -2.73], %)
|
||||
|> line([-17.67, 0.85], %)
|
||||
|> close(%)
|
||||
const bing = { yo: 55 }
|
||||
const myNestedVar = [
|
||||
{
|
||||
prop: line([bing.yo, 21], sketch001)
|
||||
}
|
||||
]
|
||||
`
|
||||
const ast = parse(code)
|
||||
if (err(ast)) throw ast
|
||||
let pathToNode: PathToNode = []
|
||||
traverse(ast, {
|
||||
enter: (node, path) => {
|
||||
if (
|
||||
node.type === 'Literal' &&
|
||||
String(node.value) === literalOfInterest
|
||||
) {
|
||||
pathToNode = path
|
||||
} else if (
|
||||
node.type === 'Identifier' &&
|
||||
literalOfInterest.includes(node.name)
|
||||
) {
|
||||
pathToNode = path
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
const literalIndex = code.indexOf(literalOfInterest)
|
||||
const pathToNode2 = getNodePathFromSourceRange(ast, [
|
||||
literalIndex + 2,
|
||||
literalIndex + 2,
|
||||
])
|
||||
expect(pathToNode).toEqual(pathToNode2)
|
||||
})
|
||||
})
|
||||
|
@ -270,6 +270,18 @@ function moreNodePathFromSourceRange(
|
||||
}
|
||||
}
|
||||
}
|
||||
if (_node.type === 'MemberExpression' && isInRange) {
|
||||
const { object, property } = _node
|
||||
if (object.start <= start && object.end >= end) {
|
||||
path.push(['object', 'MemberExpression'])
|
||||
return moreNodePathFromSourceRange(object, sourceRange, path)
|
||||
}
|
||||
if (property.start <= start && property.end >= end) {
|
||||
path.push(['property', 'MemberExpression'])
|
||||
return moreNodePathFromSourceRange(property, sourceRange, path)
|
||||
}
|
||||
return path
|
||||
}
|
||||
if (_node.type === 'PipeSubstitution' && isInRange) return path
|
||||
console.error('not implemented: ' + node.type)
|
||||
return path
|
||||
@ -307,48 +319,89 @@ type KCLNode =
|
||||
| ReturnStatement
|
||||
|
||||
export function traverse(
|
||||
node: KCLNode,
|
||||
node: KCLNode | Program,
|
||||
option: {
|
||||
enter?: (node: KCLNode) => void
|
||||
enter?: (node: KCLNode, pathToNode: PathToNode) => void
|
||||
leave?: (node: KCLNode) => void
|
||||
}
|
||||
},
|
||||
pathToNode: PathToNode = []
|
||||
) {
|
||||
option?.enter?.(node)
|
||||
const _traverse = (node: KCLNode) => traverse(node, option)
|
||||
const _node = node as KCLNode
|
||||
option?.enter?.(_node, pathToNode)
|
||||
const _traverse = (node: KCLNode, pathToNode: PathToNode) =>
|
||||
traverse(node, option, pathToNode)
|
||||
|
||||
if (node.type === 'VariableDeclaration') {
|
||||
node.declarations.forEach(_traverse)
|
||||
} else if (node.type === 'VariableDeclarator') {
|
||||
_traverse(node.init)
|
||||
} else if (node.type === 'PipeExpression') {
|
||||
node.body.forEach(_traverse)
|
||||
} else if (node.type === 'CallExpression') {
|
||||
_traverse(node.callee)
|
||||
node.arguments.forEach(_traverse)
|
||||
} else if (node.type === 'BinaryExpression') {
|
||||
_traverse(node.left)
|
||||
_traverse(node.right)
|
||||
} else if (node.type === 'Identifier') {
|
||||
if (_node.type === 'VariableDeclaration') {
|
||||
_node.declarations.forEach((declaration, index) =>
|
||||
_traverse(declaration, [
|
||||
...pathToNode,
|
||||
['declarations', 'VariableDeclaration'],
|
||||
[index, 'index'],
|
||||
])
|
||||
)
|
||||
} else if (_node.type === 'VariableDeclarator') {
|
||||
_traverse(_node.init, [...pathToNode, ['init', '']])
|
||||
} else if (_node.type === 'PipeExpression') {
|
||||
_node.body.forEach((expression, index) =>
|
||||
_traverse(expression, [
|
||||
...pathToNode,
|
||||
['body', 'PipeExpression'],
|
||||
[index, 'index'],
|
||||
])
|
||||
)
|
||||
} else if (_node.type === 'CallExpression') {
|
||||
_traverse(_node.callee, [...pathToNode, ['callee', 'CallExpression']])
|
||||
_node.arguments.forEach((arg, index) =>
|
||||
_traverse(arg, [
|
||||
...pathToNode,
|
||||
['arguments', 'CallExpression'],
|
||||
[index, 'index'],
|
||||
])
|
||||
)
|
||||
} else if (_node.type === 'BinaryExpression') {
|
||||
_traverse(_node.left, [...pathToNode, ['left', 'BinaryExpression']])
|
||||
_traverse(_node.right, [...pathToNode, ['right', 'BinaryExpression']])
|
||||
} else if (_node.type === 'Identifier') {
|
||||
// do nothing
|
||||
} else if (node.type === 'Literal') {
|
||||
} else if (_node.type === 'Literal') {
|
||||
// do nothing
|
||||
} else if (node.type === 'ArrayExpression') {
|
||||
node.elements.forEach(_traverse)
|
||||
} else if (node.type === 'ObjectExpression') {
|
||||
node.properties.forEach(({ key, value }) => {
|
||||
_traverse(key)
|
||||
_traverse(value)
|
||||
} else if (_node.type === 'TagDeclarator') {
|
||||
// do nothing
|
||||
} else if (_node.type === 'ArrayExpression') {
|
||||
_node.elements.forEach((el, index) =>
|
||||
_traverse(el, [
|
||||
...pathToNode,
|
||||
['elements', 'ArrayExpression'],
|
||||
[index, 'index'],
|
||||
])
|
||||
)
|
||||
} else if (_node.type === 'ObjectExpression') {
|
||||
_node.properties.forEach(({ key, value }, index) => {
|
||||
_traverse(key, [
|
||||
...pathToNode,
|
||||
['properties', 'ObjectExpression'],
|
||||
[index, 'index'],
|
||||
['key', 'Property'],
|
||||
])
|
||||
_traverse(value, [
|
||||
...pathToNode,
|
||||
['properties', 'ObjectExpression'],
|
||||
[index, 'index'],
|
||||
['value', 'Property'],
|
||||
])
|
||||
})
|
||||
} else if (node.type === 'UnaryExpression') {
|
||||
_traverse(node.argument)
|
||||
} else if (node.type === 'MemberExpression') {
|
||||
} else if (_node.type === 'UnaryExpression') {
|
||||
_traverse(_node.argument, [...pathToNode, ['argument', 'UnaryExpression']])
|
||||
} else if (_node.type === 'MemberExpression') {
|
||||
// hmm, this smells
|
||||
_traverse(node.object)
|
||||
_traverse(node.property)
|
||||
} else if ('body' in node && Array.isArray(node.body)) {
|
||||
node.body.forEach(_traverse)
|
||||
_traverse(_node.object, [...pathToNode, ['object', 'MemberExpression']])
|
||||
_traverse(_node.property, [...pathToNode, ['property', 'MemberExpression']])
|
||||
} else if ('body' in _node && Array.isArray(_node.body)) {
|
||||
_node.body.forEach((expression, index) =>
|
||||
_traverse(expression, [...pathToNode, ['body', ''], [index, 'index']])
|
||||
)
|
||||
}
|
||||
option?.leave?.(node)
|
||||
option?.leave?.(_node)
|
||||
}
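The pathToNode that traverse now threads through its callbacks has the same shape getNodePathFromSourceRange returns (which is exactly what the new test compares): an array of [key-or-index, hint] steps from the Program root down to the node. A minimal sketch of walking such a path back to its node, assuming every step's first element is a plain object key or array index as built above:

// Sketch: resolve a PathToNode back to the node it addresses.
function followPath(program: Program, pathToNode: PathToNode): unknown {
  let node: any = program
  for (const [step] of pathToNode) {
    node = node[step]
  }
  return node
}
// e.g. a path of [['body', ''], [0, 'index'], ['declarations', 'VariableDeclaration'],
// [0, 'index'], ['init', '']] walks program.body[0].declarations[0].init.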
|
||||
|
||||
export interface PrevVariable<T> {
|
||||
@ -734,8 +787,14 @@ export function findUsesOfTagInPipe(
|
||||
if (node.type !== 'CallExpression') return []
|
||||
const tagIndex = node.callee.name === 'close' ? 1 : 2
|
||||
const thirdParam = node.arguments[tagIndex]
|
||||
if (thirdParam?.type !== 'Literal') return []
|
||||
const tag = String(thirdParam.value)
|
||||
if (
|
||||
!(thirdParam?.type === 'TagDeclarator' || thirdParam?.type === 'Identifier')
|
||||
)
|
||||
return []
|
||||
const tag =
|
||||
thirdParam?.type === 'TagDeclarator'
|
||||
? String(thirdParam.value)
|
||||
: thirdParam.name
|
||||
|
||||
const varDec = getNodeFromPath<VariableDeclaration>(
|
||||
ast,
|
||||
@ -756,9 +815,11 @@ export function findUsesOfTagInPipe(
|
||||
)
|
||||
return
|
||||
const tagArg = node.arguments[0]
|
||||
if (tagArg.type !== 'Literal') return
|
||||
if (String(tagArg.value) === tag)
|
||||
dependentRanges.push([node.start, node.end])
|
||||
if (!(tagArg.type === 'TagDeclarator' || tagArg.type === 'Identifier'))
|
||||
return
|
||||
const tagArgValue =
|
||||
tagArg.type === 'TagDeclarator' ? String(tagArg.value) : tagArg.name
|
||||
if (tagArgValue === tag) dependentRanges.push([node.start, node.end])
|
||||
},
|
||||
})
|
||||
return dependentRanges
|
||||
|
@ -76,9 +76,9 @@ log(5, myVar)
|
||||
})
|
||||
it('recast sketch declaration', () => {
|
||||
let code = `const mySketch = startSketchAt([0, 0])
|
||||
|> lineTo([0, 1], %, "myPath")
|
||||
|> lineTo([0, 1], %, $myPath)
|
||||
|> lineTo([1, 1], %)
|
||||
|> lineTo([1, 0], %, "rightPath")
|
||||
|> lineTo([1, 0], %, $rightPath)
|
||||
|> close(%)
|
||||
`
|
||||
const { ast } = code2ast(code)
|
||||
@ -90,7 +90,7 @@ log(5, myVar)
|
||||
const code = [
|
||||
'const mySk1 = startSketchAt([0, 0])',
|
||||
' |> lineTo([1, 1], %)',
|
||||
' |> lineTo([0, 1], %, "myTag")',
|
||||
' |> lineTo([0, 1], %, $myTag)',
|
||||
' |> lineTo([1, 1], %)',
|
||||
' |> rx(90, %)',
|
||||
].join('\n')
|
||||
@ -266,7 +266,7 @@ const key = 'c'
|
||||
const code = [
|
||||
'const mySk1 = startSketchAt([0, 0])',
|
||||
' |> lineTo([1, 1], %)',
|
||||
' |> lineTo([0, 1], %, "myTag")',
|
||||
' |> lineTo([0, 1], %, $myTag)',
|
||||
' |> lineTo([1, 1], %)',
|
||||
' // a comment',
|
||||
' |> rx(90, %)',
|
||||
@ -283,7 +283,7 @@ const key = 'c'
|
||||
const mySk1 = startSketchAt([0, 0])
|
||||
|> lineTo([1, 1], %)
|
||||
// comment here
|
||||
|> lineTo([0, 1], %, 'myTag')
|
||||
|> lineTo([0, 1], %, $myTag)
|
||||
|> lineTo([1, 1], %) /* and
|
||||
here
|
||||
*/
|
||||
@ -306,7 +306,7 @@ one more for good measure
|
||||
const mySk1 = startSketchAt([0, 0])
|
||||
|> lineTo([1, 1], %)
|
||||
// comment here
|
||||
|> lineTo([0, 1], %, 'myTag')
|
||||
|> lineTo([0, 1], %, $myTag)
|
||||
|> lineTo([1, 1], %) /* and
|
||||
here */
|
||||
// a comment between pipe expression statements
|
||||
@ -356,12 +356,12 @@ describe('testing call Expressions in BinaryExpressions and UnaryExpressions', (
|
||||
describe('it recasts wrapped object expressions in pipe bodies with correct indentation', () => {
|
||||
it('with a single line', () => {
|
||||
const code = `const part001 = startSketchAt([-0.01, -0.08])
|
||||
|> line([0.62, 4.15], %, 'seg01')
|
||||
|> line([0.62, 4.15], %, $seg01)
|
||||
|> line([2.77, -1.24], %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 201,
|
||||
offset: -1.35,
|
||||
intersectTag: 'seg01'
|
||||
intersectTag: $seg01
|
||||
}, %)
|
||||
|> line([-0.42, -1.72], %)
|
||||
`
|
||||
@ -374,7 +374,7 @@ describe('it recasts wrapped object expressions in pipe bodies with correct inde
|
||||
const code = `angledLineThatIntersects({
|
||||
angle: 201,
|
||||
offset: -1.35,
|
||||
intersectTag: 'seg01'
|
||||
intersectTag: $seg01
|
||||
}, %)
|
||||
`
|
||||
const { ast } = code2ast(code)
|
||||
|
@ -1249,6 +1249,29 @@ export class EngineCommandManager extends EventTarget {
|
||||
})
|
||||
|
||||
this.initPlanes().then(async () => {
|
||||
// Hide the grid and grid scale text.
|
||||
this.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'object_visible' as any,
|
||||
// Found in engine/constants.h
|
||||
object_id: 'cfa78409-653d-4c26-96f1-7c45fb784840',
|
||||
hidden: true,
|
||||
},
|
||||
})
|
||||
|
||||
this.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'object_visible' as any,
|
||||
// Found in engine/constants.h
|
||||
object_id: '10782f33-f588-4668-8bcd-040502d26590',
|
||||
hidden: true,
|
||||
},
|
||||
})
|
||||
|
||||
this.resolveReady()
|
||||
setIsStreamReady(true)
|
||||
await executeCode()
|
||||
|
@ -230,7 +230,7 @@ describe('testing addTagForSketchOnFace', () => {
|
||||
if (err(sketchOnFaceRetVal)) return sketchOnFaceRetVal
|
||||
|
||||
const { modifiedAst } = sketchOnFaceRetVal
|
||||
const expectedCode = genCode("lineTo([-1.59, -1.54], %, 'seg01')")
|
||||
const expectedCode = genCode('lineTo([-1.59, -1.54], %, $seg01)')
|
||||
expect(recast(modifiedAst)).toBe(expectedCode)
|
||||
})
|
||||
})
|
||||
|
@ -11,6 +11,7 @@ import {
|
||||
Value,
|
||||
Literal,
|
||||
VariableDeclaration,
|
||||
Identifier,
|
||||
} from 'lang/wasm'
|
||||
import {
|
||||
getNodeFromPath,
|
||||
@ -40,6 +41,7 @@ import {
|
||||
|
||||
import {
|
||||
createLiteral,
|
||||
createTagDeclarator,
|
||||
createCallExpression,
|
||||
createArrayExpression,
|
||||
createPipeSubstitution,
|
||||
@ -51,6 +53,7 @@ import {
|
||||
import { roundOff, getLength, getAngle } from 'lib/utils'
|
||||
import { err } from 'lib/trap'
|
||||
import { perpendicularDistance } from 'sketch-helpers'
|
||||
import { TagDeclarator } from 'wasm-lib/kcl/bindings/TagDeclarator'
|
||||
|
||||
export type Coords2d = [number, number]
|
||||
|
||||
@ -1406,7 +1409,7 @@ export const angledLineThatIntersects: SketchLineHelper = {
|
||||
?.value || createLiteral('')
|
||||
: createLiteral('')
|
||||
const intersectTagName =
|
||||
intersectTag.type === 'Literal' ? intersectTag.value : ''
|
||||
intersectTag.type === 'Identifier' ? intersectTag.name : ''
|
||||
const nodeMeta2 = getNodeFromPath<VariableDeclaration>(
|
||||
_node,
|
||||
pathToNode,
|
||||
@ -1497,23 +1500,23 @@ export const angledLineThatIntersects: SketchLineHelper = {
|
||||
)
|
||||
}
|
||||
if (intersectTag !== -1) {
|
||||
const tag = firstArg.properties[intersectTag]?.value
|
||||
const tag = firstArg.properties[intersectTag]?.value as Identifier
|
||||
const pathToTagProp: PathToNode = [
|
||||
...pathToObjectExp,
|
||||
[intersectTag, 'index'],
|
||||
['value', 'Property'],
|
||||
]
|
||||
returnVal.push(
|
||||
constrainInfo(
|
||||
'intersectionTag',
|
||||
isNotLiteralArrayOrStatic(tag),
|
||||
code.slice(tag.start, tag.end),
|
||||
'angledLineThatIntersects',
|
||||
'intersectTag',
|
||||
[tag.start, tag.end],
|
||||
pathToTagProp
|
||||
)
|
||||
const info = constrainInfo(
|
||||
'intersectionTag',
|
||||
// This will always be a tag identifier.
|
||||
false,
|
||||
code.slice(tag.start, tag.end),
|
||||
'angledLineThatIntersects',
|
||||
'intersectTag',
|
||||
[tag.start, tag.end],
|
||||
pathToTagProp
|
||||
)
|
||||
returnVal.push(info)
|
||||
}
|
||||
return returnVal
|
||||
},
|
||||
@ -1830,17 +1833,18 @@ function addTag(tagIndex = 2): addTagFn {
|
||||
// Tag is always the 3rd expression now; using the arg index feels brittle,
|
||||
// but we can come up with a better way to identify the tag later.
|
||||
const thirdArg = primaryCallExp.arguments?.[tagIndex]
|
||||
const tagLiteral =
|
||||
thirdArg || (createLiteral(findUniqueName(_node, 'seg', 2)) as Literal)
|
||||
const tagDeclarator =
|
||||
thirdArg ||
|
||||
(createTagDeclarator(findUniqueName(_node, 'seg', 2)) as TagDeclarator)
|
||||
const isTagExisting = !!thirdArg
|
||||
if (!isTagExisting) {
|
||||
primaryCallExp.arguments[tagIndex] = tagLiteral
|
||||
primaryCallExp.arguments[tagIndex] = tagDeclarator
|
||||
}
|
||||
if ('value' in tagLiteral) {
|
||||
// Now TypeScript knows tagLiteral has a value property
|
||||
if ('value' in tagDeclarator) {
|
||||
// Now TypeScript knows tagDeclarator has a value property
|
||||
return {
|
||||
modifiedAst: _node,
|
||||
tag: String(tagLiteral.value),
|
||||
tag: String(tagDeclarator.value),
|
||||
}
|
||||
} else {
|
||||
return new Error('Unable to assign tag without value')
|
||||
|
@ -65,17 +65,17 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
const bigExampleArr = [
|
||||
`const part001 = startSketchOn('XY')`,
|
||||
` |> startProfileAt([0, 0], %)`,
|
||||
` |> lineTo([1, 1], %, 'abc1')`,
|
||||
` |> line([-2.04, -0.7], %, 'abc2')`,
|
||||
` |> angledLine({ angle: 157, length: 1.69 }, %, 'abc3')`,
|
||||
` |> angledLineOfXLength({ angle: 217, length: 0.86 }, %, 'abc4')`,
|
||||
` |> angledLineOfYLength({ angle: 104, length: 1.58 }, %, 'abc5')`,
|
||||
` |> angledLineToX({ angle: 55, to: -2.89 }, %, 'abc6')`,
|
||||
` |> angledLineToY({ angle: 330, to: 2.53 }, %, 'abc7')`,
|
||||
` |> xLine(1.47, %, 'abc8')`,
|
||||
` |> yLine(1.57, %, 'abc9')`,
|
||||
` |> xLineTo(1.49, %, 'abc10')`,
|
||||
` |> yLineTo(2.64, %, 'abc11')`,
|
||||
` |> lineTo([1, 1], %, $abc1)`,
|
||||
` |> line([-2.04, -0.7], %, $abc2)`,
|
||||
` |> angledLine({ angle: 157, length: 1.69 }, %, $abc3)`,
|
||||
` |> angledLineOfXLength({ angle: 217, length: 0.86 }, %, $abc4)`,
|
||||
` |> angledLineOfYLength({ angle: 104, length: 1.58 }, %, $abc5)`,
|
||||
` |> angledLineToX({ angle: 55, to: -2.89 }, %, $abc6)`,
|
||||
` |> angledLineToY({ angle: 330, to: 2.53 }, %, $abc7)`,
|
||||
` |> xLine(1.47, %, $abc8)`,
|
||||
` |> yLine(1.57, %, $abc9)`,
|
||||
` |> xLineTo(1.49, %, $abc10)`,
|
||||
` |> yLineTo(2.64, %, $abc11)`,
|
||||
` |> lineTo([2.55, 3.58], %) // lineTo`,
|
||||
` |> line([0.73, -0.75], %)`,
|
||||
` |> angledLine([63, 1.38], %) // angledLine`,
|
||||
@ -90,8 +90,8 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
]
|
||||
const bigExample = bigExampleArr.join('\n')
|
||||
it('line with tag converts to xLine', async () => {
|
||||
const callToSwap = "line([-2.04, -0.7], %, 'abc2')"
|
||||
const expectedLine = "xLine(-2.04, %, 'abc2')"
|
||||
const callToSwap = 'line([-2.04, -0.7], %, $abc2)'
|
||||
const expectedLine = 'xLine(-2.04, %, $abc2)'
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap,
|
||||
@ -116,10 +116,10 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
it('lineTo with tag converts to xLineTo', async () => {
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap: "lineTo([1, 1], %, 'abc1')",
|
||||
callToSwap: 'lineTo([1, 1], %, $abc1)',
|
||||
constraintType: 'horizontal',
|
||||
})
|
||||
const expectedLine = "xLineTo(1, %, 'abc1')"
|
||||
const expectedLine = 'xLineTo(1, %, $abc1)'
|
||||
expect(newCode).toContain(expectedLine)
|
||||
// new line should start at the same place as the old line
|
||||
expect(originalRange[0]).toBe(newCode.indexOf(expectedLine))
|
||||
@ -138,10 +138,10 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
it('angledLine with tag converts to xLine', async () => {
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap: "angledLine({ angle: 157, length: 1.69 }, %, 'abc3')",
|
||||
callToSwap: 'angledLine({ angle: 157, length: 1.69 }, %, $abc3)',
|
||||
constraintType: 'horizontal',
|
||||
})
|
||||
const expectedLine = "xLine(-1.56, %, 'abc3')"
|
||||
const expectedLine = 'xLine(-1.56, %, $abc3)'
|
||||
console.log(newCode)
|
||||
expect(newCode).toContain(expectedLine)
|
||||
// new line should start at the same place as the old line
|
||||
@ -161,11 +161,10 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
it('angledLineOfXLength with tag converts to xLine', async () => {
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap:
|
||||
"angledLineOfXLength({ angle: 217, length: 0.86 }, %, 'abc4')",
|
||||
callToSwap: 'angledLineOfXLength({ angle: 217, length: 0.86 }, %, $abc4)',
|
||||
constraintType: 'horizontal',
|
||||
})
|
||||
const expectedLine = "xLine(-0.86, %, 'abc4')"
|
||||
const expectedLine = 'xLine(-0.86, %, $abc4)'
|
||||
// hmm "-0.86" is correct since the angle is 104, but need to make sure this is compatible `-myVar`
|
||||
expect(newCode).toContain(expectedLine)
|
||||
// new line should start at the same place as the old line
|
||||
@ -185,11 +184,10 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
it('angledLineOfYLength with tag converts to yLine', async () => {
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap:
|
||||
"angledLineOfYLength({ angle: 104, length: 1.58 }, %, 'abc5')",
|
||||
callToSwap: 'angledLineOfYLength({ angle: 104, length: 1.58 }, %, $abc5)',
|
||||
constraintType: 'vertical',
|
||||
})
|
||||
const expectedLine = "yLine(1.58, %, 'abc5')"
|
||||
const expectedLine = 'yLine(1.58, %, $abc5)'
|
||||
expect(newCode).toContain(expectedLine)
|
||||
// new line should start at the same place as the old line
|
||||
expect(originalRange[0]).toBe(newCode.indexOf(expectedLine))
|
||||
@ -208,10 +206,10 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
it('angledLineToX with tag converts to xLineTo', async () => {
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap: "angledLineToX({ angle: 55, to: -2.89 }, %, 'abc6')",
|
||||
callToSwap: 'angledLineToX({ angle: 55, to: -2.89 }, %, $abc6)',
|
||||
constraintType: 'horizontal',
|
||||
})
|
||||
const expectedLine = "xLineTo(-2.89, %, 'abc6')"
|
||||
const expectedLine = 'xLineTo(-2.89, %, $abc6)'
|
||||
expect(newCode).toContain(expectedLine)
|
||||
// new line should start at the same place as the old line
|
||||
expect(originalRange[0]).toBe(newCode.indexOf(expectedLine))
|
||||
@ -230,10 +228,10 @@ describe('testing swapping out sketch calls with xLine/xLineTo', () => {
|
||||
it('angledLineToY with tag converts to yLineTo', async () => {
|
||||
const { newCode, originalRange } = await testingSwapSketchFnCall({
|
||||
inputCode: bigExample,
|
||||
callToSwap: "angledLineToY({ angle: 330, to: 2.53 }, %, 'abc7')",
|
||||
callToSwap: 'angledLineToY({ angle: 330, to: 2.53 }, %, $abc7)',
|
||||
constraintType: 'vertical',
|
||||
})
|
||||
const expectedLine = "yLineTo(2.53, %, 'abc7')"
|
||||
const expectedLine = 'yLineTo(2.53, %, $abc7)'
|
||||
expect(newCode).toContain(expectedLine)
|
||||
// new line should start at the same place as the old line
|
||||
expect(originalRange[0]).toBe(newCode.indexOf(expectedLine))
|
||||
|
@ -139,70 +139,70 @@ const myAng = 40
|
||||
const myAng2 = 134
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> line([1, 3.82], %, 'seg01') // ln-should-get-tag
|
||||
|> line([1, 3.82], %, $seg01) // ln-should-get-tag
|
||||
|> angledLineToX([
|
||||
-angleToMatchLengthX('seg01', myVar, %),
|
||||
-angleToMatchLengthX(seg01, myVar, %),
|
||||
myVar
|
||||
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|
||||
|> angledLineToY([
|
||||
-angleToMatchLengthY('seg01', myVar, %),
|
||||
-angleToMatchLengthY(seg01, myVar, %),
|
||||
myVar
|
||||
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|
||||
|> angledLine([45, segLen('seg01', %)], %) // ln-lineTo-free should become angledLine
|
||||
|> angledLine([45, segLen('seg01', %)], %) // ln-angledLineToX-free should become angledLine
|
||||
|> angledLine([myAng, segLen('seg01', %)], %) // ln-angledLineToX-angle should become angledLine
|
||||
|> angledLine([45, segLen(seg01, %)], %) // ln-lineTo-free should become angledLine
|
||||
|> angledLine([45, segLen(seg01, %)], %) // ln-angledLineToX-free should become angledLine
|
||||
|> angledLine([myAng, segLen(seg01, %)], %) // ln-angledLineToX-angle should become angledLine
|
||||
|> angledLineToX([
|
||||
angleToMatchLengthX('seg01', myVar2, %),
|
||||
angleToMatchLengthX(seg01, myVar2, %),
|
||||
myVar2
|
||||
], %) // ln-angledLineToX-xAbsolute should use angleToMatchLengthX to get angle
|
||||
|> angledLine([-45, segLen('seg01', %)], %) // ln-angledLineToY-free should become angledLine
|> angledLine([myAng2, segLen('seg01', %)], %) // ln-angledLineToY-angle should become angledLine
|> angledLine([-45, segLen(seg01, %)], %) // ln-angledLineToY-free should become angledLine
|> angledLine([myAng2, segLen(seg01, %)], %) // ln-angledLineToY-angle should become angledLine
|> angledLineToY([
angleToMatchLengthY('seg01', myVar3, %),
angleToMatchLengthY(seg01, myVar3, %),
myVar3
], %) // ln-angledLineToY-yAbsolute should use angleToMatchLengthY to get angle
|> line([
min(segLen('seg01', %), myVar),
legLen(segLen('seg01', %), myVar)
min(segLen(seg01, %), myVar),
legLen(segLen(seg01, %), myVar)
], %) // ln-should use legLen for y
|> line([
min(segLen('seg01', %), myVar),
-legLen(segLen('seg01', %), myVar)
min(segLen(seg01, %), myVar),
-legLen(segLen(seg01, %), myVar)
], %) // ln-legLen but negative
|> angledLine([-112, segLen('seg01', %)], %) // ln-should become angledLine
|> angledLine([myVar, segLen('seg01', %)], %) // ln-use segLen for second arg
|> angledLine([45, segLen('seg01', %)], %) // ln-segLen again
|> angledLine([54, segLen('seg01', %)], %) // ln-should be transformed to angledLine
|> angledLine([-112, segLen(seg01, %)], %) // ln-should become angledLine
|> angledLine([myVar, segLen(seg01, %)], %) // ln-use segLen for second arg
|> angledLine([45, segLen(seg01, %)], %) // ln-segLen again
|> angledLine([54, segLen(seg01, %)], %) // ln-should be transformed to angledLine
|> angledLineOfXLength([
legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
legAngX(segLen(seg01, %), myVar),
min(segLen(seg01, %), myVar)
], %) // ln-should use legAngX to calculate angle
|> angledLineOfXLength([
180 + legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
180 + legAngX(segLen(seg01, %), myVar),
min(segLen(seg01, %), myVar)
], %) // ln-same as above but should have + 180 to match original quadrant
|> line([
legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
legLen(segLen(seg01, %), myVar),
min(segLen(seg01, %), myVar)
], %) // ln-legLen again but yRelative
|> line([
-legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
-legLen(segLen(seg01, %), myVar),
min(segLen(seg01, %), myVar)
], %) // ln-negative legLen yRelative
|> angledLine([58, segLen('seg01', %)], %) // ln-angledLineOfYLength-free should become angledLine
|> angledLine([myAng, segLen('seg01', %)], %) // ln-angledLineOfYLength-angle should become angledLine
|> angledLine([58, segLen(seg01, %)], %) // ln-angledLineOfYLength-free should become angledLine
|> angledLine([myAng, segLen(seg01, %)], %) // ln-angledLineOfYLength-angle should become angledLine
|> angledLineOfXLength([
legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
legAngY(segLen(seg01, %), myVar),
min(segLen(seg01, %), myVar)
], %) // ln-angledLineOfYLength-yRelative use legAngY
|> angledLineOfXLength([
270 + legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
270 + legAngY(segLen(seg01, %), myVar),
min(segLen(seg01, %), myVar)
], %) // ln-angledLineOfYLength-yRelative with angle > 90 use binExp
|> xLine(segLen('seg01', %), %) // ln-xLine-free should sub in segLen
|> yLine(segLen('seg01', %), %) // ln-yLine-free should sub in segLen
|> xLine(segLen('seg01', %), %) // ln-xLineTo-free should convert to xLine
|> yLine(segLen('seg01', %), %) // ln-yLineTo-free should convert to yLine
|> xLine(segLen(seg01, %), %) // ln-xLine-free should sub in segLen
|> yLine(segLen(seg01, %), %) // ln-yLine-free should sub in segLen
|> xLine(segLen(seg01, %), %) // ln-xLineTo-free should convert to xLine
|> yLine(segLen(seg01, %), %) // ln-yLineTo-free should convert to yLine
`
it('should transform the ast', async () => {
const ast = parse(inputScript)
@ -417,10 +417,10 @@ const part001 = startSketchOn('XY')
'setVertDistance'
)
expect(expectedHorizontalCode).toContain(
`lineTo([segEndX('seg01', %) + 0.9, 4.59], %) // free`
`lineTo([segEndX(seg01, %) + 0.9, 4.59], %) // free`
)
expect(expectedVerticalCode).toContain(
`lineTo([1.21, segEndY('seg01', %) + 2.92], %) // free`
`lineTo([1.21, segEndY(seg01, %) + 2.92], %) // free`
)
})
it('testing for xRelative to vertical distance', async () => {
@ -431,7 +431,7 @@ const part001 = startSketchOn('XY')
)
expect(expectedCode).toContain(`|> lineTo([
lastSegX(%) + myVar,
segEndY('seg01', %) + 2.93
segEndY(seg01, %) + 2.93
], %) // xRelative`)
})
it('testing for yRelative to horizontal distance', async () => {
@ -441,7 +441,7 @@ const part001 = startSketchOn('XY')
'setHorzDistance'
)
expect(expectedCode).toContain(`|> lineTo([
segEndX('seg01', %) + 2.6,
segEndX(seg01, %) + 2.6,
lastSegY(%) + myVar
], %) // yRelative`)
})

@ -462,7 +462,7 @@ const setAngledIntersectLineForLines: TransformInfo['createNode'] =
angleVal,
offsetVal:
forceValueUsedInTransform || createLiteral(valueUsedInTransform),
intersectTag: createLiteral(referenceSegName),
intersectTag: createIdentifier(referenceSegName),
tag,
valueUsedInTransform,
})
@ -481,7 +481,7 @@ const setAngledIntersectForAngledLines: TransformInfo['createNode'] =
angleVal: varValA,
offsetVal:
forceValueUsedInTransform || createLiteral(valueUsedInTransform),
intersectTag: createLiteral(referenceSegName),
intersectTag: createIdentifier(referenceSegName),
tag,
valueUsedInTransform,
})
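The pattern repeated through these hunks, swapping `createLiteral(referenceSegName)` for `createIdentifier(referenceSegName)`, is what moves the generated code from string tags to the new tag identifiers. A tiny sketch of the difference in the recast output, with illustrative variable names only:

```typescript
// Sketch only: the node type controls how the tag prints when recast.
const referenceSegName = 'seg01'
const asLiteral = createLiteral(referenceSegName) // recasts as 'seg01'
const asIdentifier = createIdentifier(referenceSegName) // recasts as seg01
// So intersectTag: asIdentifier yields `intersectTag: seg01`, matching a tag
// declared earlier in the pipe with `$seg01`.
```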
@ -675,7 +675,7 @@ const transformMap: TransformMap = {
|
||||
const angleToMatchLengthXCall = createCallExpression(
|
||||
'angleToMatchLengthX',
|
||||
[
|
||||
createLiteral(referenceSegName),
|
||||
createIdentifier(referenceSegName),
|
||||
varValA,
|
||||
createPipeSubstitution(),
|
||||
]
|
||||
@ -708,7 +708,7 @@ const transformMap: TransformMap = {
|
||||
const angleToMatchLengthYCall = createCallExpression(
|
||||
'angleToMatchLengthY',
|
||||
[
|
||||
createLiteral(referenceSegName),
|
||||
createIdentifier(referenceSegName),
|
||||
varValB,
|
||||
createPipeSubstitution(),
|
||||
]
|
||||
@ -973,7 +973,7 @@ const transformMap: TransformMap = {
|
||||
const angleToMatchLengthXCall = createCallExpression(
|
||||
'angleToMatchLengthX',
|
||||
[
|
||||
createLiteral(referenceSegName),
|
||||
createIdentifier(referenceSegName),
|
||||
varValB,
|
||||
createPipeSubstitution(),
|
||||
]
|
||||
@ -1023,7 +1023,7 @@ const transformMap: TransformMap = {
|
||||
const angleToMatchLengthXCall = createCallExpression(
|
||||
'angleToMatchLengthY',
|
||||
[
|
||||
createLiteral(referenceSegName),
|
||||
createIdentifier(referenceSegName),
|
||||
varValB,
|
||||
createPipeSubstitution(),
|
||||
]
|
||||
@ -1174,7 +1174,7 @@ export function getRemoveConstraintsTransform(
|
||||
// fnName: name,
|
||||
// angleVal: args[0],
|
||||
// offsetVal: args[1],
|
||||
// intersectTag: createLiteral(referenceSegName),
|
||||
// intersectTag: createIdentifier(referenceSegName),
|
||||
// tag,
|
||||
// })
|
||||
// }
|
||||
@ -1593,8 +1593,8 @@ export function transformAstSketchLines({
|
||||
const _referencedSegmentName =
|
||||
referenceSegName ||
|
||||
(_referencedSegmentNameVal &&
|
||||
_referencedSegmentNameVal.type === 'Literal' &&
|
||||
String(_referencedSegmentNameVal.value)) ||
|
||||
_referencedSegmentNameVal.type === 'Identifier' &&
|
||||
String(_referencedSegmentNameVal.name)) ||
|
||||
''
|
||||
const { val } = firstArg
|
||||
const [varValA, varValB] = Array.isArray(val) ? val : [val, val]
|
||||
@ -1714,21 +1714,21 @@ export function transformAstSketchLines({

function createSegLen(referenceSegName: string): Value {
return createCallExpression('segLen', [
createLiteral(referenceSegName),
createIdentifier(referenceSegName),
createPipeSubstitution(),
])
}

function createSegAngle(referenceSegName: string): Value {
return createCallExpression('segAng', [
createLiteral(referenceSegName),
createIdentifier(referenceSegName),
createPipeSubstitution(),
])
}

function createSegEnd(referenceSegName: string, isX: boolean): CallExpression {
return createCallExpression(isX ? 'segEndX' : 'segEndY', [
createLiteral(referenceSegName),
createIdentifier(referenceSegName),
createPipeSubstitution(),
])
}
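These helpers are where the tag passed to `segLen`, `segAng`, `segEndX` and `segEndY` becomes an identifier node instead of a string literal. A minimal sketch of how they compose, assuming it sits in the same module as the helpers above; the `angledLine` call and the `45` angle are illustrative, not from the diff:

```typescript
// Sketch only: build the AST for `angledLine([45, segLen(seg01, %)], %)`.
// createCallExpression / createArrayExpression / createLiteral /
// createPipeSubstitution are the existing AST constructors used in this file;
// createSegLen is the helper defined above.
const referenceSegName = 'seg01'
const angledLineCall = createCallExpression('angledLine', [
  createArrayExpression([
    createLiteral(45), // the angle stays a literal
    createSegLen(referenceSegName), // recasts as segLen(seg01, %), not segLen('seg01', %)
  ]),
  createPipeSubstitution(),
])
```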
@ -2,6 +2,7 @@ import init, {
parse_wasm,
recast_wasm,
execute_wasm,
kcl_lint,
lexer_wasm,
modify_ast_for_sketch_wasm,
is_points_ccw,
@ -20,6 +21,7 @@ import { KCLError } from './errors'
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
import { EngineCommandManager } from './std/engineConnection'
import { ProgramReturn } from '../wasm-lib/kcl/bindings/ProgramReturn'
import { Discovered } from '../wasm-lib/kcl/bindings/Discovered'
import { MemoryItem } from '../wasm-lib/kcl/bindings/MemoryItem'
import type { Program } from '../wasm-lib/kcl/bindings/Program'
import type { Token } from '../wasm-lib/kcl/bindings/Token'
@ -205,6 +207,17 @@ export const _executor = async (
}
}

export const kclLint = async (ast: Program): Promise<Array<Discovered>> => {
try {
const discovered_findings: Array<Discovered> = await kcl_lint(
JSON.stringify(ast)
)
return discovered_findings
} catch (e: any) {
return Promise.reject(e)
}
}

export const recast = (ast: Program): string | Error => {
return recast_wasm(JSON.stringify(ast))
}
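For context, a minimal sketch of how the new `kclLint` wrapper could be exercised. It assumes this module's existing `parse` helper; the sample usage and logging are illustrative only:

```typescript
// Sketch only: run the WASM lint pass over a parsed program.
// `finding.finding.title` and `finding.pos` follow the Discovered shape
// consumed by lintAst later in this change.
async function logLintFindings(code: string) {
  const ast = parse(code) // assumed: the module's existing parse helper
  const findings: Discovered[] = await kclLint(ast)
  for (const finding of findings) {
    console.log(finding.finding.title, 'at', finding.pos)
  }
}
```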
@ -22,20 +22,20 @@ const filletR = 0.25
// Sketch the bracket and extrude with fillets
const bracket = startSketchOn('XY')
|> startProfileAt([0, 0], %)
|> line([0, wallMountL], %, 'outerEdge')
|> line([0, wallMountL], %, $outerEdge)
|> line([-shelfMountL, 0], %)
|> line([0, -thickness], %)
|> line([shelfMountL - thickness, 0], %, 'innerEdge')
|> line([shelfMountL - thickness, 0], %, $innerEdge)
|> line([0, -wallMountL + thickness], %)
|> close(%)
|> extrude(width, %)
|> fillet({
radius: filletR,
tags: [getPreviousAdjacentEdge('innerEdge', %)]
tags: [getPreviousAdjacentEdge(innerEdge, %)]
}, %)
|> fillet({
radius: filletR + thickness,
tags: [getPreviousAdjacentEdge('outerEdge', %)]
tags: [getPreviousAdjacentEdge(outerEdge, %)]
}, %)`

function findLineInExampleCode({
@ -2,8 +2,10 @@ import {
|
||||
createArrayExpression,
|
||||
createBinaryExpression,
|
||||
createCallExpressionStdLib,
|
||||
createIdentifier,
|
||||
createLiteral,
|
||||
createPipeSubstitution,
|
||||
createTagDeclarator,
|
||||
createUnaryExpression,
|
||||
} from 'lang/modifyAst'
|
||||
import { roundOff } from './utils'
|
||||
@ -35,13 +37,13 @@ export const getRectangleCallExpressions = (
|
||||
createLiteral(0), // This will be the width of the rectangle
|
||||
]),
|
||||
createPipeSubstitution(),
|
||||
createLiteral(tags[0]),
|
||||
createTagDeclarator(tags[0]),
|
||||
]),
|
||||
createCallExpressionStdLib('angledLine', [
|
||||
createArrayExpression([
|
||||
createBinaryExpression([
|
||||
createCallExpressionStdLib('segAng', [
|
||||
createLiteral(tags[0]),
|
||||
createIdentifier(tags[0]),
|
||||
createPipeSubstitution(),
|
||||
]),
|
||||
'+',
|
||||
@ -50,24 +52,24 @@ export const getRectangleCallExpressions = (
|
||||
createLiteral(0), // This will be the height of the rectangle
|
||||
]),
|
||||
createPipeSubstitution(),
|
||||
createLiteral(tags[1]),
|
||||
createTagDeclarator(tags[1]),
|
||||
]),
|
||||
createCallExpressionStdLib('angledLine', [
|
||||
createArrayExpression([
|
||||
createCallExpressionStdLib('segAng', [
|
||||
createLiteral(tags[0]),
|
||||
createIdentifier(tags[0]),
|
||||
createPipeSubstitution(),
|
||||
]), // same angle as the first line
|
||||
createUnaryExpression(
|
||||
createCallExpressionStdLib('segLen', [
|
||||
createLiteral(tags[0]),
|
||||
createIdentifier(tags[0]),
|
||||
createPipeSubstitution(),
|
||||
]),
|
||||
'-'
|
||||
), // negative height
|
||||
]),
|
||||
createPipeSubstitution(),
|
||||
createLiteral(tags[2]),
|
||||
createTagDeclarator(tags[2]),
|
||||
]),
|
||||
createCallExpressionStdLib('lineTo', [
|
||||
createArrayExpression([
|
||||
@ -101,7 +103,7 @@ export function updateRectangleSketch(
|
||||
.arguments[0] as ArrayExpression) = createArrayExpression([
|
||||
createBinaryExpression([
|
||||
createCallExpressionStdLib('segAng', [
|
||||
createLiteral(tag),
|
||||
createIdentifier(tag),
|
||||
createPipeSubstitution(),
|
||||
]),
|
||||
Math.sign(y) === Math.sign(x) ? '+' : '-',
|
||||
|
@ -19,8 +19,12 @@ export default function UserMenu() {
'[data-testid="user-sidebar-toggle"] img'
)

const onError = () => setAvatarErrored(true)
if (element?.tagName === 'IMG') {
element.addEventListener('error', () => setAvatarErrored(true))
element?.addEventListener('error', onError)
}
return () => {
element?.removeEventListener('error', onError)
}
}, [])

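The UserMenu change above is the standard effect-cleanup fix: the listener added on mount must be the same function reference that the cleanup removes. A generic sketch of the pattern, with an illustrative hook name and selector rather than the component's real ones:

```typescript
import { useEffect, useState } from 'react'

// Sketch only: keep one handler reference so the cleanup removes exactly the
// listener that was added (an inline arrow passed to addEventListener can
// never be removed later).
function useImageErrorFlag(selector: string) {
  const [errored, setErrored] = useState(false)
  useEffect(() => {
    const element = document.querySelector(selector)
    const onError = () => setErrored(true)
    if (element?.tagName === 'IMG') {
      element.addEventListener('error', onError)
    }
    return () => {
      element?.removeEventListener('error', onError)
    }
  }, [selector])
  return errored
}
```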
@ -5,11 +5,13 @@ import {
_executor,
ProgramMemory,
programMemoryInit,
kclLint,
} from './lang/wasm'
import { enginelessExecutor } from './lib/testHelpers'
import { EngineCommandManager } from './lang/std/engineConnection'
import { KCLError } from './lang/errors'
import { SidebarType } from 'components/ModelingSidebar/ModelingPanes'
import { Diagnostic } from '@codemirror/lint'

export type ToolTip =
| 'lineTo'
@ -187,3 +189,24 @@ export async function executeAst({
}
}
}

export async function lintAst({
ast,
}: {
ast: Program
}): Promise<Array<Diagnostic>> {
try {
const discovered_findings = await kclLint(ast)
return discovered_findings.map((lint) => {
return {
message: lint.finding.title,
severity: 'info',
from: lint.pos[0],
to: lint.pos[1],
}
})
} catch (e: any) {
console.log(e)
return []
}
}

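Because `lintAst` already returns CodeMirror `Diagnostic` objects, exposing it to the editor can be a thin wrapper. A sketch assuming a `linter` extension from `@codemirror/lint`; the extension name and the direct `parse` call are illustrative, not the app's actual wiring:

```typescript
import { linter } from '@codemirror/lint'

// Sketch only: use the new lint pass as a CodeMirror lint source.
// `parse` and `lintAst` are the helpers from this change (assumed importable
// here); a real integration would reuse the already-parsed AST instead of
// re-parsing the whole document on every lint run.
const kclLintExtension = linter(async (view) => {
  const ast = parse(view.state.doc.toString())
  return lintAst({ ast })
})
```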
59
src/wasm-lib/Cargo.lock
generated
@ -183,12 +183,6 @@ dependencies = [
|
||||
"syn 2.0.68",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atomic"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba"
|
||||
|
||||
[[package]]
|
||||
name = "auto_impl"
|
||||
version = "1.2.0"
|
||||
@ -662,6 +656,20 @@ dependencies = [
|
||||
"parking_lot_core 0.9.9",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dashmap"
|
||||
version = "6.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"crossbeam-utils",
|
||||
"hashbrown 0.14.3",
|
||||
"lock_api",
|
||||
"once_cell",
|
||||
"parking_lot_core 0.9.9",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "data-encoding"
|
||||
version = "2.5.0"
|
||||
@ -1375,7 +1383,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-lib"
|
||||
version = "0.1.66"
|
||||
version = "0.1.67"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"approx",
|
||||
@ -1387,7 +1395,7 @@ dependencies = [
|
||||
"clap",
|
||||
"convert_case",
|
||||
"criterion",
|
||||
"dashmap",
|
||||
"dashmap 6.0.1",
|
||||
"databake",
|
||||
"derive-docs",
|
||||
"expectorate",
|
||||
@ -1986,15 +1994,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3"
|
||||
version = "0.21.2"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8"
|
||||
checksum = "1962a33ed2a201c637fc14a4e0fd4e06e6edfdeee6a5fede0dab55507ad74cf7"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"indoc",
|
||||
"libc",
|
||||
"memoffset",
|
||||
"parking_lot 0.12.1",
|
||||
"once_cell",
|
||||
"portable-atomic",
|
||||
"pyo3-build-config",
|
||||
"pyo3-ffi",
|
||||
@ -2004,9 +2012,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-build-config"
|
||||
version = "0.21.2"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50"
|
||||
checksum = "ab7164b2202753bd33afc7f90a10355a719aa973d1f94502c50d06f3488bc420"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"target-lexicon",
|
||||
@ -2014,9 +2022,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-ffi"
|
||||
version = "0.21.2"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403"
|
||||
checksum = "c6424906ca49013c0829c5c1ed405e20e2da2dc78b82d198564880a704e6a7b7"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"pyo3-build-config",
|
||||
@ -2024,9 +2032,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-macros"
|
||||
version = "0.21.2"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c"
|
||||
checksum = "82b2f19e153122d64afd8ce7aaa72f06a00f52e34e1d1e74b6d71baea396460a"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"pyo3-macros-backend",
|
||||
@ -2036,11 +2044,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-macros-backend"
|
||||
version = "0.21.2"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c"
|
||||
checksum = "dd698c04cac17cf0fe63d47790ab311b8b25542f5cb976b65c374035c50f1eef"
|
||||
dependencies = [
|
||||
"heck 0.4.1",
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
"pyo3-build-config",
|
||||
"quote",
|
||||
@ -2596,9 +2604,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.117"
|
||||
version = "1.0.118"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3"
|
||||
checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4"
|
||||
dependencies = [
|
||||
"indexmap 2.2.5",
|
||||
"itoa",
|
||||
@ -3160,7 +3168,7 @@ dependencies = [
|
||||
"async-trait",
|
||||
"auto_impl",
|
||||
"bytes",
|
||||
"dashmap",
|
||||
"dashmap 5.5.3",
|
||||
"futures",
|
||||
"httparse",
|
||||
"lsp-types",
|
||||
@ -3409,11 +3417,10 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.8.0"
|
||||
version = "1.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0"
|
||||
checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439"
|
||||
dependencies = [
|
||||
"atomic",
|
||||
"getrandom",
|
||||
"serde",
|
||||
"wasm-bindgen",
|
||||
|
@ -15,10 +15,10 @@ clap = "4.5.7"
|
||||
gloo-utils = "0.2.0"
|
||||
kcl-lib = { path = "kcl" }
|
||||
kittycad.workspace = true
|
||||
serde_json = "1.0.116"
|
||||
serde_json = "1.0.118"
|
||||
tokio = { version = "1.38.0", features = ["sync"] }
|
||||
toml = "0.8.14"
|
||||
uuid = { version = "1.8.0", features = ["v4", "js", "serde"] }
|
||||
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
|
||||
wasm-bindgen = "0.2.91"
|
||||
wasm-bindgen-futures = "0.4.42"
|
||||
|
||||
@ -31,7 +31,7 @@ pretty_assertions = "1.4.0"
|
||||
reqwest = { version = "0.11.26", default-features = false }
|
||||
tokio = { version = "1.38.0", features = ["rt-multi-thread", "macros", "time"] }
|
||||
twenty-twenty = "0.8"
|
||||
uuid = { version = "1.8.0", features = ["v4", "js", "serde"] }
|
||||
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
console_error_panic_hook = "0.1.7"
|
||||
|
@ -96,10 +96,16 @@ fn do_stdlib_inner(
}

if !ast.sig.generics.params.is_empty() {
errors.push(Error::new_spanned(
&ast.sig.generics,
"generics are not permitted for stdlib functions",
));
if ast.sig.generics.params.iter().any(|generic_type| match generic_type {
syn::GenericParam::Lifetime(_) => false,
syn::GenericParam::Type(_) => true,
syn::GenericParam::Const(_) => true,
}) {
errors.push(Error::new_spanned(
&ast.sig.generics,
"Stdlib functions may not be generic over types or constants, only lifetimes.",
));
}
}

if ast.sig.variadic.is_some() {
@ -650,7 +656,12 @@ impl Parse for ItemFnForSignature {
}

fn clean_ty_string(t: &str) -> (String, proc_macro2::TokenStream) {
let mut ty_string = t.replace('&', "").replace("mut", "").replace(' ', "");
let mut ty_string = t
.replace("& 'a", "")
.replace('&', "")
.replace("mut", "")
.replace("< 'a >", "")
.replace(' ', "");
if ty_string.starts_with("Args") {
ty_string = "Args".to_string();
}

@ -35,6 +35,56 @@ fn test_get_inner_array_type() {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_with_refs() {
|
||||
let (item, mut errors) = do_stdlib(
|
||||
quote! {
|
||||
name = "someFn",
|
||||
},
|
||||
quote! {
|
||||
/// Docs
|
||||
/// ```
|
||||
/// someFn()
|
||||
/// ```
|
||||
fn someFn(
|
||||
data: &'a str,
|
||||
) -> i32 {
|
||||
3
|
||||
}
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
if let Some(e) = errors.pop() {
|
||||
panic!("{e}");
|
||||
}
|
||||
expectorate::assert_contents("tests/args_with_refs.gen", &get_text_fmt(&item).unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_args_with_lifetime() {
|
||||
let (item, mut errors) = do_stdlib(
|
||||
quote! {
|
||||
name = "someFn",
|
||||
},
|
||||
quote! {
|
||||
/// Docs
|
||||
/// ```
|
||||
/// someFn()
|
||||
/// ```
|
||||
fn someFn<'a>(
|
||||
data: Foo<'a>,
|
||||
) -> i32 {
|
||||
3
|
||||
}
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
if let Some(e) = errors.pop() {
|
||||
panic!("{e}");
|
||||
}
|
||||
expectorate::assert_contents("tests/args_with_lifetime.gen", &get_text_fmt(&item).unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_stdlib_line_to() {
|
||||
let (item, errors) = do_stdlib(
|
||||
@ -64,7 +114,6 @@ fn test_stdlib_line_to() {
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
let _expected = quote! {};
|
||||
|
||||
assert!(errors.is_empty());
|
||||
expectorate::assert_contents("tests/lineTo.gen", &get_text_fmt(&item).unwrap());
|
||||
|
194
src/wasm-lib/derive-docs/tests/args_with_lifetime.gen
Normal file
@ -0,0 +1,194 @@
|
||||
#[cfg(test)]
|
||||
mod test_examples_someFn {
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_mock_example_someFn0() {
|
||||
let tokens = crate::token::lexer("someFn()").unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
let ctx = crate::executor::ExecutorContext {
|
||||
engine: std::sync::Arc::new(Box::new(
|
||||
crate::engine::conn_mock::EngineConnection::new()
|
||||
.await
|
||||
.unwrap(),
|
||||
)),
|
||||
fs: std::sync::Arc::new(crate::fs::FileManager::new()),
|
||||
stdlib: std::sync::Arc::new(crate::std::StdLib::new()),
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
async fn serial_test_example_someFn0() {
|
||||
let user_agent = concat!(env!("CARGO_PKG_NAME"), ".rs/", env!("CARGO_PKG_VERSION"),);
|
||||
let http_client = reqwest::Client::builder()
|
||||
.user_agent(user_agent)
|
||||
.timeout(std::time::Duration::from_secs(600))
|
||||
.connect_timeout(std::time::Duration::from_secs(60));
|
||||
let ws_client = reqwest::Client::builder()
|
||||
.user_agent(user_agent)
|
||||
.timeout(std::time::Duration::from_secs(600))
|
||||
.connect_timeout(std::time::Duration::from_secs(60))
|
||||
.connection_verbose(true)
|
||||
.tcp_keepalive(std::time::Duration::from_secs(600))
|
||||
.http1_only();
|
||||
let token = std::env::var("KITTYCAD_API_TOKEN").expect("KITTYCAD_API_TOKEN not set");
|
||||
let mut client = kittycad::Client::new_from_reqwest(token, http_client, ws_client);
|
||||
if let Ok(addr) = std::env::var("LOCAL_ENGINE_ADDR") {
|
||||
client.set_base_url(addr);
|
||||
}
|
||||
|
||||
let tokens = crate::token::lexer("someFn()").unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
crate::executor::SourceRange::default(),
|
||||
kittycad::types::ModelingCmd::ZoomToFit {
|
||||
object_ids: Default::default(),
|
||||
padding: 0.1,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let resp = ctx
|
||||
.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
crate::executor::SourceRange::default(),
|
||||
kittycad::types::ModelingCmd::TakeSnapshot {
|
||||
format: kittycad::types::ImageFormat::Png,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let output_file =
|
||||
std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
|
||||
if let kittycad::types::OkWebSocketResponseData::Modeling {
|
||||
modeling_response: kittycad::types::OkModelingCmdResponse::TakeSnapshot { data },
|
||||
} = &resp
|
||||
{
|
||||
std::fs::write(&output_file, &data.contents.0).unwrap();
|
||||
} else {
|
||||
panic!("Unexpected response from engine: {:?}", resp);
|
||||
}
|
||||
|
||||
let actual = image::io::Reader::open(output_file)
|
||||
.unwrap()
|
||||
.decode()
|
||||
.unwrap();
|
||||
twenty_twenty::assert_image(
|
||||
&format!("tests/outputs/{}.png", "serial_test_example_someFn0"),
|
||||
&actual,
|
||||
1.0,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_camel_case_types, missing_docs)]
|
||||
#[doc = "Std lib function: someFn\nDocs"]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, schemars :: JsonSchema, ts_rs :: TS)]
|
||||
#[ts(export)]
|
||||
pub(crate) struct SomeFn {}
|
||||
|
||||
#[allow(non_upper_case_globals, missing_docs)]
|
||||
#[doc = "Std lib function: someFn\nDocs"]
|
||||
pub(crate) const SomeFn: SomeFn = SomeFn {};
|
||||
fn boxed_someFn(
|
||||
args: crate::std::Args,
|
||||
) -> std::pin::Pin<
|
||||
Box<
|
||||
dyn std::future::Future<
|
||||
Output = anyhow::Result<crate::executor::MemoryItem, crate::errors::KclError>,
|
||||
> + Send,
|
||||
>,
|
||||
> {
|
||||
Box::pin(someFn(args))
|
||||
}
|
||||
|
||||
impl crate::docs::StdLibFn for SomeFn {
|
||||
fn name(&self) -> String {
|
||||
"someFn".to_string()
|
||||
}
|
||||
|
||||
fn summary(&self) -> String {
|
||||
"Docs".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"".to_string()
|
||||
}
|
||||
|
||||
fn tags(&self) -> Vec<String> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn args(&self) -> Vec<crate::docs::StdLibFnArg> {
|
||||
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
||||
settings.inline_subschemas = true;
|
||||
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
||||
vec![crate::docs::StdLibFnArg {
|
||||
name: "data".to_string(),
|
||||
type_: "Foo".to_string(),
|
||||
schema: Foo::json_schema(&mut generator),
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
|
||||
fn return_value(&self) -> Option<crate::docs::StdLibFnArg> {
|
||||
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
||||
settings.inline_subschemas = true;
|
||||
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
||||
Some(crate::docs::StdLibFnArg {
|
||||
name: "".to_string(),
|
||||
type_: "i32".to_string(),
|
||||
schema: <i32>::json_schema(&mut generator),
|
||||
required: true,
|
||||
})
|
||||
}
|
||||
|
||||
fn unpublished(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn deprecated(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<String> {
|
||||
let code_blocks = vec!["someFn()"];
|
||||
code_blocks
|
||||
.iter()
|
||||
.map(|cb| {
|
||||
let tokens = crate::token::lexer(cb).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
let mut options: crate::ast::types::FormatOptions = Default::default();
|
||||
options.insert_final_newline = false;
|
||||
program.recast(&options, 0)
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
}
|
||||
|
||||
fn std_lib_fn(&self) -> crate::std::StdFn {
|
||||
boxed_someFn
|
||||
}
|
||||
|
||||
fn clone_box(&self) -> Box<dyn crate::docs::StdLibFn> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
#[doc = r" Docs"]
|
||||
#[doc = r" ```"]
|
||||
#[doc = r" someFn()"]
|
||||
#[doc = r" ```"]
|
||||
fn someFn<'a>(data: Foo<'a>) -> i32 {
|
||||
3
|
||||
}
|
194
src/wasm-lib/derive-docs/tests/args_with_refs.gen
Normal file
@ -0,0 +1,194 @@
|
||||
#[cfg(test)]
|
||||
mod test_examples_someFn {
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_mock_example_someFn0() {
|
||||
let tokens = crate::token::lexer("someFn()").unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
let ctx = crate::executor::ExecutorContext {
|
||||
engine: std::sync::Arc::new(Box::new(
|
||||
crate::engine::conn_mock::EngineConnection::new()
|
||||
.await
|
||||
.unwrap(),
|
||||
)),
|
||||
fs: std::sync::Arc::new(crate::fs::FileManager::new()),
|
||||
stdlib: std::sync::Arc::new(crate::std::StdLib::new()),
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
async fn serial_test_example_someFn0() {
|
||||
let user_agent = concat!(env!("CARGO_PKG_NAME"), ".rs/", env!("CARGO_PKG_VERSION"),);
|
||||
let http_client = reqwest::Client::builder()
|
||||
.user_agent(user_agent)
|
||||
.timeout(std::time::Duration::from_secs(600))
|
||||
.connect_timeout(std::time::Duration::from_secs(60));
|
||||
let ws_client = reqwest::Client::builder()
|
||||
.user_agent(user_agent)
|
||||
.timeout(std::time::Duration::from_secs(600))
|
||||
.connect_timeout(std::time::Duration::from_secs(60))
|
||||
.connection_verbose(true)
|
||||
.tcp_keepalive(std::time::Duration::from_secs(600))
|
||||
.http1_only();
|
||||
let token = std::env::var("KITTYCAD_API_TOKEN").expect("KITTYCAD_API_TOKEN not set");
|
||||
let mut client = kittycad::Client::new_from_reqwest(token, http_client, ws_client);
|
||||
if let Ok(addr) = std::env::var("LOCAL_ENGINE_ADDR") {
|
||||
client.set_base_url(addr);
|
||||
}
|
||||
|
||||
let tokens = crate::token::lexer("someFn()").unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
crate::executor::SourceRange::default(),
|
||||
kittycad::types::ModelingCmd::ZoomToFit {
|
||||
object_ids: Default::default(),
|
||||
padding: 0.1,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let resp = ctx
|
||||
.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
crate::executor::SourceRange::default(),
|
||||
kittycad::types::ModelingCmd::TakeSnapshot {
|
||||
format: kittycad::types::ImageFormat::Png,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
let output_file =
|
||||
std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
|
||||
if let kittycad::types::OkWebSocketResponseData::Modeling {
|
||||
modeling_response: kittycad::types::OkModelingCmdResponse::TakeSnapshot { data },
|
||||
} = &resp
|
||||
{
|
||||
std::fs::write(&output_file, &data.contents.0).unwrap();
|
||||
} else {
|
||||
panic!("Unexpected response from engine: {:?}", resp);
|
||||
}
|
||||
|
||||
let actual = image::io::Reader::open(output_file)
|
||||
.unwrap()
|
||||
.decode()
|
||||
.unwrap();
|
||||
twenty_twenty::assert_image(
|
||||
&format!("tests/outputs/{}.png", "serial_test_example_someFn0"),
|
||||
&actual,
|
||||
1.0,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_camel_case_types, missing_docs)]
|
||||
#[doc = "Std lib function: someFn\nDocs"]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, schemars :: JsonSchema, ts_rs :: TS)]
|
||||
#[ts(export)]
|
||||
pub(crate) struct SomeFn {}
|
||||
|
||||
#[allow(non_upper_case_globals, missing_docs)]
|
||||
#[doc = "Std lib function: someFn\nDocs"]
|
||||
pub(crate) const SomeFn: SomeFn = SomeFn {};
|
||||
fn boxed_someFn(
|
||||
args: crate::std::Args,
|
||||
) -> std::pin::Pin<
|
||||
Box<
|
||||
dyn std::future::Future<
|
||||
Output = anyhow::Result<crate::executor::MemoryItem, crate::errors::KclError>,
|
||||
> + Send,
|
||||
>,
|
||||
> {
|
||||
Box::pin(someFn(args))
|
||||
}
|
||||
|
||||
impl crate::docs::StdLibFn for SomeFn {
|
||||
fn name(&self) -> String {
|
||||
"someFn".to_string()
|
||||
}
|
||||
|
||||
fn summary(&self) -> String {
|
||||
"Docs".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"".to_string()
|
||||
}
|
||||
|
||||
fn tags(&self) -> Vec<String> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn args(&self) -> Vec<crate::docs::StdLibFnArg> {
|
||||
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
||||
settings.inline_subschemas = true;
|
||||
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
||||
vec![crate::docs::StdLibFnArg {
|
||||
name: "data".to_string(),
|
||||
type_: "string".to_string(),
|
||||
schema: str::json_schema(&mut generator),
|
||||
required: true,
|
||||
}]
|
||||
}
|
||||
|
||||
fn return_value(&self) -> Option<crate::docs::StdLibFnArg> {
|
||||
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
||||
settings.inline_subschemas = true;
|
||||
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
||||
Some(crate::docs::StdLibFnArg {
|
||||
name: "".to_string(),
|
||||
type_: "i32".to_string(),
|
||||
schema: <i32>::json_schema(&mut generator),
|
||||
required: true,
|
||||
})
|
||||
}
|
||||
|
||||
fn unpublished(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn deprecated(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<String> {
|
||||
let code_blocks = vec!["someFn()"];
|
||||
code_blocks
|
||||
.iter()
|
||||
.map(|cb| {
|
||||
let tokens = crate::token::lexer(cb).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
let mut options: crate::ast::types::FormatOptions = Default::default();
|
||||
options.insert_final_newline = false;
|
||||
program.recast(&options, 0)
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
}
|
||||
|
||||
fn std_lib_fn(&self) -> crate::std::StdFn {
|
||||
boxed_someFn
|
||||
}
|
||||
|
||||
fn clone_box(&self) -> Box<dyn crate::docs::StdLibFn> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
#[doc = r" Docs"]
|
||||
#[doc = r" ```"]
|
||||
#[doc = r" someFn()"]
|
||||
#[doc = r" ```"]
|
||||
fn someFn(data: &'a str) -> i32 {
|
||||
3
|
||||
}
|
@ -11,5 +11,5 @@ hyper = { version = "0.14.29", features = ["server"] }
|
||||
kcl-lib = { version = "0.1.62", path = "../kcl" }
|
||||
pico-args = "0.5.0"
|
||||
serde = { version = "1.0.203", features = ["derive"] }
|
||||
serde_json = "1.0.117"
|
||||
serde_json = "1.0.118"
|
||||
tokio = { version = "1.38.0", features = ["macros", "rt-multi-thread"] }
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "kcl-lib"
|
||||
description = "KittyCAD Language implementation and tools"
|
||||
version = "0.1.66"
|
||||
version = "0.1.67"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/KittyCAD/modeling-app"
|
||||
@ -17,7 +17,7 @@ async-trait = "0.1.80"
|
||||
base64 = "0.22.1"
|
||||
chrono = "0.4.38"
|
||||
clap = { version = "4.5.7", default-features = false, optional = true }
|
||||
dashmap = "5.5.3"
|
||||
dashmap = "6.0.1"
|
||||
databake = { version = "0.1.8", features = ["derive"] }
|
||||
derive-docs = { version = "0.1.18", path = "../derive-docs" }
|
||||
form_urlencoded = "1.2.1"
|
||||
@ -28,18 +28,18 @@ kittycad = { workspace = true, features = ["clap"] }
|
||||
lazy_static = "1.5.0"
|
||||
mime_guess = "2.0.4"
|
||||
parse-display = "0.9.1"
|
||||
pyo3 = {version = "0.21.2", optional = true}
|
||||
pyo3 = {version = "0.22.0", optional = true}
|
||||
reqwest = { version = "0.11.26", default-features = false, features = ["stream", "rustls-tls"] }
|
||||
ropey = "1.6.1"
|
||||
schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1"] }
|
||||
serde = { version = "1.0.203", features = ["derive"] }
|
||||
serde_json = "1.0.116"
|
||||
serde_json = "1.0.118"
|
||||
sha2 = "0.10.8"
|
||||
thiserror = "1.0.61"
|
||||
toml = "0.8.14"
|
||||
ts-rs = { version = "9.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
|
||||
url = { version = "2.5.2", features = ["serde"] }
|
||||
uuid = { version = "1.8.0", features = ["v4", "js", "serde"] }
|
||||
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
|
||||
validator = { version = "0.18.1", features = ["derive"] }
|
||||
winnow = "0.5.40"
|
||||
zip = { version = "2.0.0", default-features = false }
|
||||
|
@ -1232,28 +1232,34 @@ impl CallExpression {
source_ranges: vec![self.into()],
})
})?;
let result = result.get_value()?;

let result = result.get_value()?;
Ok(result)
}
FunctionKind::UserDefined => {
let func = memory.get(&fn_name, self.into())?;
let result = func
.call_fn(fn_args, memory.clone(), ctx.clone())
.await
.map_err(|e| {
let (result, global_memory_items) =
func.call_fn(fn_args, memory.clone(), ctx.clone()).await.map_err(|e| {
// Add the call expression to the source ranges.
e.add_source_ranges(vec![self.into()])
})?
.ok_or_else(|| {
KclError::UndefinedValue(KclErrorDetails {
message: format!("Result of user-defined function {} is undefined", fn_name),
source_ranges: vec![self.into()],
})
})?;

let result = result.ok_or_else(|| {
KclError::UndefinedValue(KclErrorDetails {
message: format!("Result of user-defined function {} is undefined", fn_name),
source_ranges: vec![self.into()],
})
})?;
let result = result.get_value()?;

// Add the global memory items to the memory.
for (key, item) in global_memory_items {
// We don't care about errors here because any collisions
// would happened in the function call itself and already
// errored out.
memory.add(&key, item, self.into()).unwrap_or_default();
}

Ok(result)
}
}
@ -2101,7 +2107,7 @@ impl ObjectExpression {
|
||||
format!(
|
||||
"{}: {}",
|
||||
prop.key.name,
|
||||
prop.value.recast(options, indentation_level + 1, is_in_pipe)
|
||||
prop.value.recast(options, indentation_level + 1, is_in_pipe).trim()
|
||||
)
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
@ -3748,6 +3754,56 @@ const outsideRevolve = startSketchOn('XZ')
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recast_fn_in_object() {
|
||||
let some_program_string = r#"const bing = { yo: 55 }
|
||||
const myNestedVar = [{ prop: callExp(bing.yo) }]
|
||||
"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
|
||||
let recasted = program.recast(&Default::default(), 0);
|
||||
assert_eq!(recasted, some_program_string);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recast_fn_in_array() {
|
||||
let some_program_string = r#"const bing = { yo: 55 }
|
||||
const myNestedVar = [callExp(bing.yo)]
|
||||
"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
|
||||
let recasted = program.recast(&Default::default(), 0);
|
||||
assert_eq!(recasted, some_program_string);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recast_object_fn_in_array_weird_bracket() {
|
||||
let some_program_string = r#"const bing = { yo: 55 }
|
||||
const myNestedVar = [
|
||||
{
|
||||
prop: line([bing.yo, 21], sketch001)
|
||||
}
|
||||
]
|
||||
"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let program = parser.ast().unwrap();
|
||||
|
||||
let recasted = program.recast(&Default::default(), 0);
|
||||
assert_eq!(
|
||||
recasted,
|
||||
r#"const bing = { yo: 55 }
|
||||
const myNestedVar = [
|
||||
{ prop: line([bing.yo, 21], sketch001) }
|
||||
]
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recast_empty_file() {
|
||||
let some_program_string = r#""#;
|
||||
@ -3897,10 +3953,10 @@ const hole_diam = 5
|
||||
fn rectShape = (pos, w, l) => {
|
||||
const rr = startSketchOn('xy')
|
||||
|> startProfileAt([pos[0] - (w / 2), pos[1] - (l / 2)], %)
|
||||
|> lineTo([pos[0] + w / 2, pos[1] - (l / 2)], %, "edge1")
|
||||
|> lineTo([pos[0] + w / 2, pos[1] + l / 2], %, "edge2")
|
||||
|> lineTo([pos[0] - (w / 2), pos[1] + l / 2], %, "edge3")
|
||||
|> close(%, "edge4")
|
||||
|> lineTo([pos[0] + w / 2, pos[1] - (l / 2)], %,$edge1)
|
||||
|> lineTo([pos[0] + w / 2, pos[1] + l / 2], %, $edge2)
|
||||
|> lineTo([pos[0] - (w / 2), pos[1] + l / 2], %, $edge3)
|
||||
|> close(%, $edge4)
|
||||
return rr
|
||||
}
|
||||
// build the body of the focusrite scarlett solo gen 4
|
||||
@ -3910,10 +3966,10 @@ const scarlett_body = rectShape([0, 0], width, length)
|
||||
|> fillet({
|
||||
radius: radius,
|
||||
tags: [
|
||||
getEdge("edge2", %),
|
||||
getEdge("edge4", %),
|
||||
getOppositeEdge("edge2", %),
|
||||
getOppositeEdge("edge4", %)
|
||||
getEdge(edge2, %),
|
||||
getEdge(edge4, %),
|
||||
getOppositeEdge(edge2, %),
|
||||
getOppositeEdge(edge4, %)
|
||||
]
|
||||
}, %)
|
||||
// build the bracket sketch around the body
|
||||
@ -3927,14 +3983,14 @@ fn bracketSketch = (w, d, t) => {
|
||||
}
|
||||
})
|
||||
|> startProfileAt([-w / 2 - t, d + t], %)
|
||||
|> lineTo([-w / 2 - t, -t], %, "edge1")
|
||||
|> lineTo([w / 2 + t, -t], %, "edge2")
|
||||
|> lineTo([w / 2 + t, d + t], %, "edge3")
|
||||
|> lineTo([w / 2, d + t], %, "edge4")
|
||||
|> lineTo([w / 2, 0], %, "edge5")
|
||||
|> lineTo([-w / 2, 0], %, "edge6")
|
||||
|> lineTo([-w / 2, d + t], %, "edge7")
|
||||
|> close(%, "edge8")
|
||||
|> lineTo([-w / 2 - t, -t], %, $edge1)
|
||||
|> lineTo([w / 2 + t, -t], %, $edge2)
|
||||
|> lineTo([w / 2 + t, d + t], %, $edge3)
|
||||
|> lineTo([w / 2, d + t], %, $edge4)
|
||||
|> lineTo([w / 2, 0], %, $edge5)
|
||||
|> lineTo([-w / 2, 0], %, $edge6)
|
||||
|> lineTo([-w / 2, d + t], %, $edge7)
|
||||
|> close(%, $edge8)
|
||||
return s
|
||||
}
|
||||
// build the body of the bracket
|
||||
@ -3943,10 +3999,10 @@ const bracket_body = bracketSketch(width, depth, thk)
|
||||
|> fillet({
|
||||
radius: radius,
|
||||
tags: [
|
||||
getNextAdjacentEdge("edge7", %),
|
||||
getNextAdjacentEdge("edge2", %),
|
||||
getNextAdjacentEdge("edge3", %),
|
||||
getNextAdjacentEdge("edge6", %)
|
||||
getNextAdjacentEdge(edge7, %),
|
||||
getNextAdjacentEdge(edge2, %),
|
||||
getNextAdjacentEdge(edge3, %),
|
||||
getNextAdjacentEdge(edge6, %)
|
||||
]
|
||||
}, %)
|
||||
// build the tabs of the mounting bracket (right side)
|
||||
@ -4017,10 +4073,10 @@ const hole_diam = 5
|
||||
fn rectShape = (pos, w, l) => {
|
||||
const rr = startSketchOn('xy')
|
||||
|> startProfileAt([pos[0] - (w / 2), pos[1] - (l / 2)], %)
|
||||
|> lineTo([pos[0] + w / 2, pos[1] - (l / 2)], %, "edge1")
|
||||
|> lineTo([pos[0] + w / 2, pos[1] + l / 2], %, "edge2")
|
||||
|> lineTo([pos[0] - (w / 2), pos[1] + l / 2], %, "edge3")
|
||||
|> close(%, "edge4")
|
||||
|> lineTo([pos[0] + w / 2, pos[1] - (l / 2)], %, $edge1)
|
||||
|> lineTo([pos[0] + w / 2, pos[1] + l / 2], %, $edge2)
|
||||
|> lineTo([pos[0] - (w / 2), pos[1] + l / 2], %, $edge3)
|
||||
|> close(%, $edge4)
|
||||
return rr
|
||||
}
|
||||
// build the body of the focusrite scarlett solo gen 4
|
||||
@ -4030,10 +4086,10 @@ const scarlett_body = rectShape([0, 0], width, length)
|
||||
|> fillet({
|
||||
radius: radius,
|
||||
tags: [
|
||||
getEdge("edge2", %),
|
||||
getEdge("edge4", %),
|
||||
getOppositeEdge("edge2", %),
|
||||
getOppositeEdge("edge4", %)
|
||||
getEdge(edge2, %),
|
||||
getEdge(edge4, %),
|
||||
getOppositeEdge(edge2, %),
|
||||
getOppositeEdge(edge4, %)
|
||||
]
|
||||
}, %)
|
||||
// build the bracket sketch around the body
|
||||
@ -4047,14 +4103,14 @@ fn bracketSketch = (w, d, t) => {
|
||||
}
|
||||
})
|
||||
|> startProfileAt([-w / 2 - t, d + t], %)
|
||||
|> lineTo([-w / 2 - t, -t], %, "edge1")
|
||||
|> lineTo([w / 2 + t, -t], %, "edge2")
|
||||
|> lineTo([w / 2 + t, d + t], %, "edge3")
|
||||
|> lineTo([w / 2, d + t], %, "edge4")
|
||||
|> lineTo([w / 2, 0], %, "edge5")
|
||||
|> lineTo([-w / 2, 0], %, "edge6")
|
||||
|> lineTo([-w / 2, d + t], %, "edge7")
|
||||
|> close(%, "edge8")
|
||||
|> lineTo([-w / 2 - t, -t], %, $edge1)
|
||||
|> lineTo([w / 2 + t, -t], %, $edge2)
|
||||
|> lineTo([w / 2 + t, d + t], %, $edge3)
|
||||
|> lineTo([w / 2, d + t], %, $edge4)
|
||||
|> lineTo([w / 2, 0], %, $edge5)
|
||||
|> lineTo([-w / 2, 0], %, $edge6)
|
||||
|> lineTo([-w / 2, d + t], %, $edge7)
|
||||
|> close(%, $edge8)
|
||||
return s
|
||||
}
|
||||
// build the body of the bracket
|
||||
@ -4063,10 +4119,10 @@ const bracket_body = bracketSketch(width, depth, thk)
|
||||
|> fillet({
|
||||
radius: radius,
|
||||
tags: [
|
||||
getNextAdjacentEdge("edge7", %),
|
||||
getNextAdjacentEdge("edge2", %),
|
||||
getNextAdjacentEdge("edge3", %),
|
||||
getNextAdjacentEdge("edge6", %)
|
||||
getNextAdjacentEdge(edge7, %),
|
||||
getNextAdjacentEdge(edge2, %),
|
||||
getNextAdjacentEdge(edge3, %),
|
||||
getNextAdjacentEdge(edge6, %)
|
||||
]
|
||||
}, %)
|
||||
// build the tabs of the mounting bracket (right side)
|
||||
@ -4376,7 +4432,7 @@ const mySk1 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> lineTo([1, 1], %)
|
||||
// comment here
|
||||
|> lineTo([0, 1], %, 'myTag')
|
||||
|> lineTo([0, 1], %, $myTag)
|
||||
|> lineTo([1, 1], %)
|
||||
/* and
|
||||
here
|
||||
@ -4399,7 +4455,7 @@ const mySk1 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> lineTo([1, 1], %)
|
||||
// comment here
|
||||
|> lineTo([0, 1], %, 'myTag')
|
||||
|> lineTo([0, 1], %, $myTag)
|
||||
|> lineTo([1, 1], %)
|
||||
/* and
|
||||
here */
|
||||
@ -4417,12 +4473,12 @@ const mySk1 = startSketchOn('XY')
|
||||
fn test_recast_multiline_object() {
|
||||
let some_program_string = r#"const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-0.01, -0.08], %)
|
||||
|> line([0.62, 4.15], %, 'seg01')
|
||||
|> line([0.62, 4.15], %, $seg01)
|
||||
|> line([2.77, -1.24], %)
|
||||
|> angledLineThatIntersects({
|
||||
angle: 201,
|
||||
offset: -1.35,
|
||||
intersectTag: 'seg01'
|
||||
intersectTag: seg01
|
||||
}, %)
|
||||
|> line([-0.42, -1.72], %)"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
@ -4507,13 +4563,13 @@ const myAng = 40
|
||||
const myAng2 = 134
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> line([1, 3.82], %, 'seg01') // ln-should-get-tag
|
||||
|> line([1, 3.82], %, $seg01) // ln-should-get-tag
|
||||
|> angledLineToX([
|
||||
-angleToMatchLengthX('seg01', myVar, %),
|
||||
-angleToMatchLengthX(seg01, myVar, %),
|
||||
myVar
|
||||
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|
||||
|> angledLineToY([
|
||||
-angleToMatchLengthY('seg01', myVar, %),
|
||||
-angleToMatchLengthY(seg01, myVar, %),
|
||||
myVar
|
||||
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
@ -4533,13 +4589,13 @@ const myAng = 40
|
||||
const myAng2 = 134
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> line([1, 3.82], %, 'seg01') // ln-should-get-tag
|
||||
|> line([1, 3.82], %, $seg01) // ln-should-get-tag
|
||||
|> angledLineToX([
|
||||
-angleToMatchLengthX('seg01', myVar, %),
|
||||
-angleToMatchLengthX(seg01, myVar, %),
|
||||
myVar
|
||||
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|
||||
|> angledLineToY([
|
||||
-angleToMatchLengthY('seg01', myVar, %),
|
||||
-angleToMatchLengthY(seg01, myVar, %),
|
||||
myVar
|
||||
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|
||||
"#;
|
||||
@ -5183,4 +5239,38 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
|
||||
|
||||
assert_eq!(l.raw, "false");
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_parse_tag_named_std_lib() {
|
||||
let some_program_string = r#"startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> line([5, 5], %, $xLine)
|
||||
"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([76, 82])], message: "Cannot assign a tag to a reserved keyword: xLine" }"#
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_parse_empty_tag() {
|
||||
let some_program_string = r#"startSketchOn('XY')
|
||||
|> startProfileAt([0, 0], %)
|
||||
|> line([5, 5], %, $)
|
||||
"#;
|
||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([57, 59])], message: "Unexpected token" }"#
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -100,6 +100,18 @@ impl ProgramMemory {
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get all TagDeclarators and TagIdentifiers in the memory.
|
||||
pub fn get_tags(&self) -> HashMap<String, MemoryItem> {
|
||||
self.root
|
||||
.values()
|
||||
.filter_map(|item| match item {
|
||||
MemoryItem::TagDeclarator(t) => Some((t.name.to_string(), item.clone())),
|
||||
MemoryItem::TagIdentifier(t) => Some((t.value.to_string(), item.clone())),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<HashMap<String, MemoryItem>>()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ProgramMemory {
|
||||
@ -525,14 +537,17 @@ impl std::hash::Hash for TagIdentifier {
|
||||
}
|
||||
}
|
||||
|
||||
pub type MemoryFunction =
|
||||
fn(
|
||||
s: Vec<MemoryItem>,
|
||||
memory: ProgramMemory,
|
||||
expression: Box<FunctionExpression>,
|
||||
metadata: Vec<Metadata>,
|
||||
ctx: ExecutorContext,
|
||||
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<Option<ProgramReturn>, KclError>> + Send>>;
|
||||
pub type MemoryFunction = fn(
|
||||
s: Vec<MemoryItem>,
|
||||
memory: ProgramMemory,
|
||||
expression: Box<FunctionExpression>,
|
||||
metadata: Vec<Metadata>,
|
||||
ctx: ExecutorContext,
|
||||
) -> std::pin::Pin<
|
||||
Box<
|
||||
dyn std::future::Future<Output = Result<(Option<ProgramReturn>, HashMap<String, MemoryItem>), KclError>> + Send,
|
||||
>,
|
||||
>;
|
||||
|
||||
fn force_memory_function<
|
||||
F: Fn(
|
||||
@ -541,7 +556,12 @@ fn force_memory_function<
|
||||
Box<FunctionExpression>,
|
||||
Vec<Metadata>,
|
||||
ExecutorContext,
|
||||
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<Option<ProgramReturn>, KclError>> + Send>>,
|
||||
) -> std::pin::Pin<
|
||||
Box<
|
||||
dyn std::future::Future<Output = Result<(Option<ProgramReturn>, HashMap<String, MemoryItem>), KclError>>
|
||||
+ Send,
|
||||
>,
|
||||
>,
|
||||
>(
|
||||
f: F,
|
||||
) -> F {
|
||||
@ -686,7 +706,7 @@ impl MemoryItem {
|
||||
args: Vec<MemoryItem>,
|
||||
memory: ProgramMemory,
|
||||
ctx: ExecutorContext,
|
||||
) -> Result<Option<ProgramReturn>, KclError> {
|
||||
) -> Result<(Option<ProgramReturn>, HashMap<String, MemoryItem>), KclError> {
|
||||
let MemoryItem::Function { func, expression, meta } = &self else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: "not a in memory function".to_string(),
|
||||
@ -1500,7 +1520,16 @@ impl ExecutorContext {
|
||||
}
|
||||
FunctionKind::UserDefined => {
|
||||
if let Some(func) = memory.clone().root.get(&fn_name) {
|
||||
let result = func.call_fn(args.clone(), memory.clone(), self.clone()).await?;
|
||||
let (result, global_memory_items) =
|
||||
func.call_fn(args.clone(), memory.clone(), self.clone()).await?;
|
||||
|
||||
// Add the global memory items to the memory.
|
||||
for (key, item) in global_memory_items {
|
||||
// We don't care about errors here because any collisions
|
||||
// would happened in the function call itself and already
|
||||
// errored out.
|
||||
memory.add(&key, item, call_expr.into()).unwrap_or_default();
|
||||
}
|
||||
|
||||
memory.return_ = result;
|
||||
} else {
|
||||
@ -1625,7 +1654,7 @@ impl ExecutorContext {
|
||||
.inner_execute(function_expression.body.clone(), &mut fn_memory, BodyType::Block)
|
||||
.await?;
|
||||
|
||||
Ok(result.return_)
|
||||
Ok((result.return_, fn_memory.get_tags()))
|
||||
})
|
||||
},
|
||||
);
|
||||
|
@ -1,4 +1,7 @@
|
||||
use crate::ast::types;
|
||||
use crate::{
|
||||
ast::{types, types::ValueMeta},
|
||||
executor::SourceRange,
|
||||
};
|
||||
|
||||
/// The "Node" type wraps all the AST elements we're able to find in a KCL
|
||||
/// file. Tokens we walk through will be one of these.
|
||||
@ -33,6 +36,34 @@ pub enum Node<'a> {
|
||||
LiteralIdentifier(&'a types::LiteralIdentifier),
|
||||
}
|
||||
|
||||
impl From<&Node<'_>> for SourceRange {
|
||||
fn from(node: &Node) -> Self {
|
||||
match node {
|
||||
Node::Program(p) => SourceRange([p.start, p.end]),
|
||||
Node::ExpressionStatement(e) => SourceRange([e.start(), e.end()]),
|
||||
Node::VariableDeclaration(v) => SourceRange([v.start(), v.end()]),
|
||||
Node::ReturnStatement(r) => SourceRange([r.start(), r.end()]),
|
||||
Node::VariableDeclarator(v) => SourceRange([v.start(), v.end()]),
|
||||
Node::Literal(l) => SourceRange([l.start(), l.end()]),
|
||||
Node::TagDeclarator(t) => SourceRange([t.start(), t.end()]),
|
||||
Node::Identifier(i) => SourceRange([i.start(), i.end()]),
|
||||
Node::BinaryExpression(b) => SourceRange([b.start(), b.end()]),
|
||||
Node::FunctionExpression(f) => SourceRange([f.start(), f.end()]),
|
||||
Node::CallExpression(c) => SourceRange([c.start(), c.end()]),
|
||||
Node::PipeExpression(p) => SourceRange([p.start(), p.end()]),
|
||||
Node::PipeSubstitution(p) => SourceRange([p.start(), p.end()]),
|
||||
Node::ArrayExpression(a) => SourceRange([a.start(), a.end()]),
|
||||
Node::ObjectExpression(o) => SourceRange([o.start(), o.end()]),
|
||||
Node::MemberExpression(m) => SourceRange([m.start(), m.end()]),
|
||||
Node::UnaryExpression(u) => SourceRange([u.start(), u.end()]),
|
||||
Node::Parameter(p) => SourceRange([p.identifier.start(), p.identifier.end()]),
|
||||
Node::ObjectProperty(o) => SourceRange([o.start(), o.end()]),
|
||||
Node::MemberObject(m) => SourceRange([m.start(), m.end()]),
|
||||
Node::LiteralIdentifier(l) => SourceRange([l.start(), l.end()]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_from {
|
||||
($node:ident, $t: ident) => {
|
||||
impl<'a> From<&'a types::$t> for Node<'a> {
|
||||
|
@ -1,4 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use schemars::JsonSchema;
|
||||
use serde::Serialize;
|
||||
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
|
||||
|
||||
use crate::{executor::SourceRange, lint::Node, lsp::IntoDiagnostic};
|
||||
@ -22,8 +24,10 @@ where
|
||||
}
|
||||
|
||||
/// Specific discovered lint rule Violation of a particular Finding.
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, ts_rs::TS, Serialize, JsonSchema)]
|
||||
#[ts(export)]
|
||||
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Discovered {
|
||||
/// Zoo Lint Finding information.
|
||||
pub finding: Finding,
|
||||
@ -63,6 +67,12 @@ impl Discovered {
|
||||
}
|
||||
|
||||
impl IntoDiagnostic for Discovered {
|
||||
fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
|
||||
(&self).to_lsp_diagnostic(code)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoDiagnostic for &Discovered {
|
||||
fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
|
||||
let message = self.finding.title.to_owned();
|
||||
let source_range = self.pos;
|
||||
@ -83,8 +93,10 @@ impl IntoDiagnostic for Discovered {
|
||||
}
|
||||
|
||||
/// Abstract lint problem type.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
#[derive(Clone, Debug, PartialEq, ts_rs::TS, Serialize, JsonSchema)]
|
||||
#[ts(export)]
|
||||
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Finding {
|
||||
/// Unique identifier for this particular issue.
|
||||
pub code: &'static str,
|
||||
|
@ -1,6 +1,11 @@
|
||||
//! Functions for the `kcl` lsp server.
|
||||
|
||||
use std::{collections::HashMap, io::Write, str::FromStr, sync::Arc};
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
io::Write,
|
||||
str::FromStr,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
@ -14,34 +19,59 @@ use tower_lsp::{
|
||||
jsonrpc::Result as RpcResult,
|
||||
lsp_types::{
|
||||
CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, CreateFilesParams,
|
||||
DeleteFilesParams, DiagnosticOptions, DiagnosticServerCapabilities, DidChangeConfigurationParams,
|
||||
DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams,
|
||||
DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticParams,
|
||||
DocumentDiagnosticReport, DocumentDiagnosticReportResult, DocumentFilter, DocumentFormattingParams,
|
||||
DocumentSymbol, DocumentSymbolParams, DocumentSymbolResponse, Documentation, FoldingRange, FoldingRangeParams,
|
||||
FoldingRangeProviderCapability, FullDocumentDiagnosticReport, Hover, HoverContents, HoverParams,
|
||||
HoverProviderCapability, InitializeParams, InitializeResult, InitializedParams, InlayHint, InlayHintParams,
|
||||
InsertTextFormat, MarkupContent, MarkupKind, MessageType, OneOf, Position, RelatedFullDocumentDiagnosticReport,
|
||||
RenameFilesParams, RenameParams, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensFullOptions,
|
||||
SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensRegistrationOptions,
|
||||
SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities, SignatureHelp,
|
||||
SignatureHelpOptions, SignatureHelpParams, StaticRegistrationOptions, TextDocumentItem,
|
||||
TextDocumentRegistrationOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
|
||||
TextEdit, WorkDoneProgressOptions, WorkspaceEdit, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
|
||||
WorkspaceServerCapabilities,
|
||||
DeleteFilesParams, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
|
||||
DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
|
||||
DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
|
||||
DidSaveTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
|
||||
DocumentFilter, DocumentFormattingParams, DocumentSymbol, DocumentSymbolParams, DocumentSymbolResponse,
|
||||
Documentation, FoldingRange, FoldingRangeParams, FoldingRangeProviderCapability, FullDocumentDiagnosticReport,
|
||||
Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult,
|
||||
InitializedParams, InlayHint, InlayHintParams, InsertTextFormat, MarkupContent, MarkupKind, MessageType, OneOf,
|
||||
Position, RelatedFullDocumentDiagnosticReport, RenameFilesParams, RenameParams, SemanticToken,
|
||||
SemanticTokenModifier, SemanticTokenType, SemanticTokens, SemanticTokensFullOptions, SemanticTokensLegend,
|
||||
SemanticTokensOptions, SemanticTokensParams, SemanticTokensRegistrationOptions, SemanticTokensResult,
|
||||
SemanticTokensServerCapabilities, ServerCapabilities, SignatureHelp, SignatureHelpOptions, SignatureHelpParams,
|
||||
StaticRegistrationOptions, TextDocumentItem, TextDocumentRegistrationOptions, TextDocumentSyncCapability,
|
||||
TextDocumentSyncKind, TextDocumentSyncOptions, TextEdit, WorkDoneProgressOptions, WorkspaceEdit,
|
||||
WorkspaceFolder, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
|
||||
},
|
||||
Client, LanguageServer,
|
||||
};
|
||||
|
||||
use super::backend::{InnerHandle, UpdateHandle};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::lint::checks;
|
||||
use crate::{
|
||||
ast::types::VariableKind,
|
||||
ast::types::{Value, VariableKind},
|
||||
executor::SourceRange,
|
||||
lint::checks,
|
||||
lsp::{backend::Backend as _, safemap::SafeMap, util::IntoDiagnostic},
|
||||
lsp::{
|
||||
backend::{Backend as _, InnerHandle, UpdateHandle},
|
||||
safemap::SafeMap,
|
||||
util::IntoDiagnostic,
|
||||
},
|
||||
parser::PIPE_OPERATOR,
|
||||
token::TokenType,
|
||||
};
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref SEMANTIC_TOKEN_TYPES: Vec<SemanticTokenType> = {
|
||||
// This is safe to unwrap because we know all the token types are valid.
|
||||
// And the test would fail if they were not.
|
||||
let mut gen = TokenType::all_semantic_token_types().unwrap();
|
||||
gen.extend(vec![
|
||||
SemanticTokenType::PARAMETER,
|
||||
SemanticTokenType::PROPERTY,
|
||||
]);
|
||||
gen
|
||||
};
|
||||
|
||||
pub static ref SEMANTIC_TOKEN_MODIFIERS: Vec<SemanticTokenModifier> = {
|
||||
vec![
|
||||
SemanticTokenModifier::DECLARATION,
|
||||
SemanticTokenModifier::DEFINITION,
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
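As a consumption sketch: `token_type` on a `SemanticToken` indexes into `SEMANTIC_TOKEN_TYPES`, and, the way this change stores it, `token_modifiers_bitset` refers back into `SEMANTIC_TOKEN_MODIFIERS`. The helper below is illustrative only:

use tower_lsp::lsp_types::SemanticToken;

fn describe_token(token: &SemanticToken) -> (Option<String>, Option<String>) {
    let ty = SEMANTIC_TOKEN_TYPES
        .get(token.token_type as usize)
        .map(|t| t.as_str().to_owned());
    let modifier = SEMANTIC_TOKEN_MODIFIERS
        .get(token.token_modifiers_bitset as usize)
        .map(|m| m.as_str().to_owned());
    (ty, modifier)
}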
/// A subcommand for running the server.
|
||||
#[derive(Clone, Debug)]
|
||||
#[cfg_attr(feature = "cli", derive(Parser))]
|
||||
@ -68,8 +98,6 @@ pub struct Backend {
|
||||
pub stdlib_completions: HashMap<String, CompletionItem>,
|
||||
/// The stdlib signatures for the language.
|
||||
pub stdlib_signatures: HashMap<String, SignatureHelp>,
|
||||
/// The types of tokens the server supports.
|
||||
pub token_types: Vec<SemanticTokenType>,
|
||||
/// Token maps.
|
||||
pub token_map: SafeMap<String, Vec<crate::token::Token>>,
|
||||
/// AST maps.
|
||||
@ -166,14 +194,18 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
}
|
||||
|
||||
async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
|
||||
self.clear_diagnostics_map(¶ms.uri).await;
|
||||
// We already updated the code map in the shared backend.
|
||||
|
||||
// Let's update the tokens.
|
||||
let tokens = match crate::token::lexer(¶ms.text) {
|
||||
Ok(tokens) => tokens,
|
||||
Err(err) => {
|
||||
self.add_to_diagnostics(¶ms, err).await;
|
||||
self.add_to_diagnostics(¶ms, err, true).await;
|
||||
self.token_map.remove(¶ms.uri.to_string()).await;
|
||||
self.ast_map.remove(¶ms.uri.to_string()).await;
|
||||
self.symbols_map.remove(¶ms.uri.to_string()).await;
|
||||
self.semantic_tokens_map.remove(¶ms.uri.to_string()).await;
|
||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -208,12 +240,15 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
}
|
||||
|
||||
// Let's update the ast.
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let parser = crate::parser::Parser::new(tokens.clone());
|
||||
let result = parser.ast();
|
||||
let ast = match result {
|
||||
Ok(ast) => ast,
|
||||
Err(err) => {
|
||||
self.add_to_diagnostics(¶ms, err).await;
|
||||
self.add_to_diagnostics(¶ms, err, true).await;
|
||||
self.ast_map.remove(¶ms.uri.to_string()).await;
|
||||
self.symbols_map.remove(¶ms.uri.to_string()).await;
|
||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -241,6 +276,24 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
ast.get_lsp_symbols(¶ms.text).unwrap_or_default(),
|
||||
)
|
||||
.await;
|
||||
|
||||
// Update our semantic tokens.
|
||||
self.update_semantic_tokens(tokens, ¶ms).await;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
let discovered_findings = ast
|
||||
.lint(checks::lint_variables)
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
// Clear any previous lint findings before re-linting.
|
||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::INFORMATION))
|
||||
.await;
|
||||
for discovered_finding in &discovered_findings {
|
||||
self.add_to_diagnostics(¶ms, discovered_finding, false).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Send the notification to the client that the ast was updated.
|
||||
@ -256,13 +309,12 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
// This function automatically executes if we should & updates the diagnostics if we got
|
||||
// errors.
|
||||
if self.execute(¶ms, ast.clone()).await.is_err() {
|
||||
// if there was an issue, let's bail and avoid trying to lint.
|
||||
return;
|
||||
}
|
||||
|
||||
for discovered_finding in ast.lint(checks::lint_variables).into_iter().flatten() {
|
||||
self.add_to_diagnostics(¶ms, discovered_finding).await;
|
||||
}
|
||||
// If we made it here we can clear the diagnostics.
|
||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
@ -299,14 +351,14 @@ impl Backend {
|
||||
token_type = SemanticTokenType::FUNCTION;
|
||||
}
|
||||
|
||||
let token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
|
||||
let mut token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
|
||||
Some(index) => index,
|
||||
// This is actually bad; this should not fail.
|
||||
// TODO: ensure we never get here.
|
||||
// The test for listing all semantic token types should make this never happen.
|
||||
None => {
|
||||
self.client
|
||||
.log_message(
|
||||
MessageType::INFO,
|
||||
MessageType::ERROR,
|
||||
format!("token type `{:?}` not accounted for", token_type),
|
||||
)
|
||||
.await;
|
||||
@ -317,6 +369,108 @@ impl Backend {
|
||||
let source_range: SourceRange = token.clone().into();
|
||||
let position = source_range.start_to_lsp_position(¶ms.text);
|
||||
|
||||
// Calculate the token modifiers.
|
||||
// Get the value at the current position.
|
||||
let token_modifiers_bitset: u32 = if let Some(ast) = self.ast_map.get(¶ms.uri.to_string()).await {
|
||||
let token_index = Arc::new(Mutex::new(token_type_index));
|
||||
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
||||
crate::lint::walk(&ast, &|node: crate::lint::Node| {
|
||||
let node_range: SourceRange = (&node).into();
|
||||
if !node_range.contains(source_range.start()) {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let get_modifier = |modifier: SemanticTokenModifier| -> Result<bool> {
|
||||
let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
|
||||
return Ok(true);
|
||||
};
|
||||
if *mods == 0 {
|
||||
*mods = token_modifier_index;
|
||||
} else {
|
||||
*mods |= token_modifier_index;
|
||||
}
|
||||
Ok(false)
|
||||
};
|
||||
|
||||
match node {
|
||||
crate::lint::Node::TagDeclarator(_) => {
|
||||
return get_modifier(SemanticTokenModifier::DEFINITION);
|
||||
}
|
||||
crate::lint::Node::VariableDeclarator(variable) => {
|
||||
let sr: SourceRange = variable.id.clone().into();
|
||||
if sr.contains(source_range.start()) {
|
||||
if let Value::FunctionExpression(_) = &variable.init {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
}
|
||||
|
||||
return get_modifier(SemanticTokenModifier::DECLARATION);
|
||||
}
|
||||
}
|
||||
crate::lint::Node::Parameter(_) => {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PARAMETER) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
crate::lint::Node::MemberExpression(member_expression) => {
|
||||
let sr: SourceRange = member_expression.property.clone().into();
|
||||
if sr.contains(source_range.start()) {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
crate::lint::Node::ObjectProperty(object_property) => {
|
||||
let sr: SourceRange = object_property.key.clone().into();
|
||||
if sr.contains(source_range.start()) {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
}
|
||||
return get_modifier(SemanticTokenModifier::DECLARATION);
|
||||
}
|
||||
crate::lint::Node::CallExpression(call_expr) => {
|
||||
let sr: SourceRange = call_expr.callee.clone().into();
|
||||
if sr.contains(source_range.start()) {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(true)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let t = if let Ok(guard) = token_index.lock() { *guard } else { 0 };
|
||||
token_type_index = t;
|
||||
|
||||
let m = if let Ok(guard) = modifier_index.lock() {
|
||||
*guard
|
||||
} else {
|
||||
0
|
||||
};
|
||||
m
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
// We need to check if we are on the last token of the line.
|
||||
// If we are starting from the end of the last line, just add 1 to the line.
|
||||
@ -328,8 +482,8 @@ impl Backend {
|
||||
delta_line: position.line - last_position.line + 1,
|
||||
delta_start: 0,
|
||||
length: token.value.len() as u32,
|
||||
token_type: token_type_index as u32,
|
||||
token_modifiers_bitset: 0,
|
||||
token_type: token_type_index,
|
||||
token_modifiers_bitset,
|
||||
};
|
||||
|
||||
semantic_tokens.push(semantic_token);
|
||||
@ -347,8 +501,8 @@ impl Backend {
|
||||
position.character - last_position.character
|
||||
},
|
||||
length: token.value.len() as u32,
|
||||
token_type: token_type_index as u32,
|
||||
token_modifiers_bitset: 0,
|
||||
token_type: token_type_index,
|
||||
token_modifiers_bitset,
|
||||
};
|
||||
|
||||
semantic_tokens.push(semantic_token);
|
||||
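The `delta_line`/`delta_start` bookkeeping above follows the LSP wire format, where each semantic token is positioned relative to the previous one. A standard client-side decoder looks roughly like this (illustrative, not part of the diff):

use tower_lsp::lsp_types::SemanticToken;

fn absolute_positions(data: &[SemanticToken]) -> Vec<(u32, u32)> {
    let mut out = Vec::with_capacity(data.len());
    let (mut line, mut character) = (0u32, 0u32);
    for token in data {
        line += token.delta_line;
        if token.delta_line > 0 {
            // Once the line changes, delta_start is measured from column 0.
            character = 0;
        }
        character += token.delta_start;
        out.push((line, character));
    }
    out
}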
@ -360,7 +514,19 @@ impl Backend {
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn clear_diagnostics_map(&self, uri: &url::Url) {
|
||||
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
|
||||
let mut items = match self.diagnostics_map.get(uri.as_str()).await {
|
||||
Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items.clone(),
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
// If we only want to clear a specific severity, do that.
|
||||
if let Some(severity) = severity {
|
||||
items.retain(|x| x.severity != Some(severity));
|
||||
} else {
|
||||
items.clear();
|
||||
}
|
||||
|
||||
self.diagnostics_map
|
||||
.insert(
|
||||
uri.to_string(),
|
||||
@ -368,7 +534,7 @@ impl Backend {
|
||||
related_documents: None,
|
||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||
result_id: None,
|
||||
items: vec![],
|
||||
items: items.clone(),
|
||||
},
|
||||
}),
|
||||
)
|
||||
@ -376,7 +542,7 @@ impl Backend {
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.client.publish_diagnostics(uri.clone(), vec![], None).await;
|
||||
self.client.publish_diagnostics(uri.clone(), items, None).await;
|
||||
}
|
||||
}
|
||||
|
||||
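The severity filter above is the core of the new behaviour: clearing ERROR diagnostics leaves lint INFORMATION items in place, and vice versa. The same retain-versus-clear logic in isolation (illustrative helper, not part of the diff):

use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};

fn clear_by_severity(items: &mut Vec<Diagnostic>, severity: Option<DiagnosticSeverity>) {
    match severity {
        // Drop only diagnostics of the given severity; keep the rest.
        Some(severity) => items.retain(|d| d.severity != Some(severity)),
        // No severity given: wipe everything, as the old code always did.
        None => items.clear(),
    }
}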
@ -384,6 +550,7 @@ impl Backend {
|
||||
&self,
|
||||
params: &TextDocumentItem,
|
||||
diagnostic: DiagT,
|
||||
clear_all_before_add: bool,
|
||||
) {
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostic))
|
||||
@ -391,6 +558,16 @@ impl Backend {
|
||||
|
||||
let diagnostic = diagnostic.to_lsp_diagnostic(¶ms.text);
|
||||
|
||||
if clear_all_before_add {
|
||||
self.clear_diagnostics_map(¶ms.uri, None).await;
|
||||
} else if diagnostic.severity == Some(DiagnosticSeverity::ERROR) {
|
||||
// If the diagnostic is an error, it will be the only error we get since that halts
|
||||
// execution.
|
||||
// Clear the diagnostics before we add a new one.
|
||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
||||
.await;
|
||||
}
|
||||
|
||||
let DocumentDiagnosticReport::Full(mut report) = self
|
||||
.diagnostics_map
|
||||
.get(params.uri.clone().as_str())
|
||||
@ -406,6 +583,19 @@ impl Backend {
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
// Ensure we don't already have this diagnostic.
|
||||
if report
|
||||
.full_document_diagnostic_report
|
||||
.items
|
||||
.iter()
|
||||
.any(|x| x == &diagnostic)
|
||||
{
|
||||
self.client
|
||||
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
|
||||
report.full_document_diagnostic_report.items.push(diagnostic);
|
||||
|
||||
self.diagnostics_map
|
||||
@ -439,7 +629,8 @@ impl Backend {
|
||||
let memory = match executor_ctx.run(ast, None).await {
|
||||
Ok(memory) => memory,
|
||||
Err(err) => {
|
||||
self.add_to_diagnostics(params, err).await;
|
||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||
self.add_to_diagnostics(params, err, false).await;
|
||||
|
||||
// Since we already published the diagnostics we don't really care about the error
|
||||
// string.
|
||||
@ -458,8 +649,18 @@ impl Backend {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<usize> {
|
||||
self.token_types.iter().position(|x| *x == token_type)
|
||||
pub fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<u32> {
|
||||
SEMANTIC_TOKEN_TYPES
|
||||
.iter()
|
||||
.position(|x| *x == token_type)
|
||||
.map(|y| y as u32)
|
||||
}
|
||||
|
||||
pub fn get_semantic_token_modifier_index(&self, token_type: SemanticTokenModifier) -> Option<u32> {
|
||||
SEMANTIC_TOKEN_MODIFIERS
|
||||
.iter()
|
||||
.position(|x| *x == token_type)
|
||||
.map(|y| y as u32)
|
||||
}
|
||||
|
||||
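A usage sketch for the two lookups above; the wrapper function is illustrative, but the indices come straight from the static legends, so they match what is registered with the client:

use tower_lsp::lsp_types::{SemanticToken, SemanticTokenModifier, SemanticTokenType};

fn example_token(backend: &Backend) -> SemanticToken {
    let token_type = backend
        .get_semantic_token_type_index(SemanticTokenType::FUNCTION)
        .unwrap_or(0);
    let token_modifiers_bitset = backend
        .get_semantic_token_modifier_index(SemanticTokenModifier::DECLARATION)
        .unwrap_or(0);
    SemanticToken {
        delta_line: 0,
        delta_start: 0,
        length: 4,
        token_type,
        token_modifiers_bitset,
    }
}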
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
|
||||
@ -679,8 +880,8 @@ impl LanguageServer for Backend {
|
||||
semantic_tokens_options: SemanticTokensOptions {
|
||||
work_done_progress_options: WorkDoneProgressOptions::default(),
|
||||
legend: SemanticTokensLegend {
|
||||
token_types: self.token_types.clone(),
|
||||
token_modifiers: vec![],
|
||||
token_types: SEMANTIC_TOKEN_TYPES.clone(),
|
||||
token_modifiers: SEMANTIC_TOKEN_MODIFIERS.clone(),
|
||||
},
|
||||
range: Some(false),
|
||||
full: Some(SemanticTokensFullOptions::Bool(true)),
|
||||
|
@ -5,7 +5,10 @@ use std::{
|
||||
|
||||
use anyhow::Result;
|
||||
use pretty_assertions::assert_eq;
|
||||
use tower_lsp::LanguageServer;
|
||||
use tower_lsp::{
|
||||
lsp_types::{SemanticTokenModifier, SemanticTokenType},
|
||||
LanguageServer,
|
||||
};
|
||||
|
||||
use crate::{executor::ProgramMemory, lsp::backend::Backend};
|
||||
|
||||
@ -42,9 +45,6 @@ async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
|
||||
let stdlib = crate::std::StdLib::new();
|
||||
let stdlib_completions = crate::lsp::kcl::get_completions_from_stdlib(&stdlib)?;
|
||||
let stdlib_signatures = crate::lsp::kcl::get_signatures_from_stdlib(&stdlib)?;
|
||||
// We can unwrap here because we know the tokeniser is valid, since
|
||||
// we have a test for it.
|
||||
let token_types = crate::token::TokenType::all_semantic_token_types()?;
|
||||
|
||||
let zoo_client = new_zoo_client();
|
||||
|
||||
@ -63,7 +63,6 @@ async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
|
||||
workspace_folders: Default::default(),
|
||||
stdlib_completions,
|
||||
stdlib_signatures,
|
||||
token_types,
|
||||
token_map: Default::default(),
|
||||
ast_map: Default::default(),
|
||||
memory_map: Default::default(),
|
||||
@ -1087,6 +1086,163 @@ async fn test_kcl_lsp_semantic_tokens() {
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_kcl_lsp_semantic_tokens_with_modifiers() {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: r#"const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)
|
||||
|
||||
const thing = {blah: "foo"}
|
||||
const bar = thing.blah
|
||||
|
||||
fn myFn = (param1) => {
|
||||
return param1
|
||||
}"#
|
||||
.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Ensure we have no diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
if !diagnostics.full_document_diagnostic_report.items.is_empty() {
|
||||
panic!(
|
||||
"Expected no diagnostics, {:?}",
|
||||
diagnostics.full_document_diagnostic_report.items
|
||||
);
|
||||
}
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Get the token map.
|
||||
let token_map = server.token_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(token_map != vec![]);
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
|
||||
// Send semantic tokens request.
|
||||
let semantic_tokens = server
|
||||
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
},
|
||||
partial_result_params: Default::default(),
|
||||
work_done_progress_params: Default::default(),
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
// Check the semantic tokens.
|
||||
if let tower_lsp::lsp_types::SemanticTokensResult::Tokens(semantic_tokens) = semantic_tokens {
|
||||
let function_index = server
|
||||
.get_semantic_token_type_index(SemanticTokenType::FUNCTION)
|
||||
.unwrap();
|
||||
let property_index = server
|
||||
.get_semantic_token_type_index(SemanticTokenType::PROPERTY)
|
||||
.unwrap();
|
||||
let parameter_index = server
|
||||
.get_semantic_token_type_index(SemanticTokenType::PARAMETER)
|
||||
.unwrap();
|
||||
let variable_index = server
|
||||
.get_semantic_token_type_index(SemanticTokenType::VARIABLE)
|
||||
.unwrap();
|
||||
|
||||
let declaration_index = server
|
||||
.get_semantic_token_modifier_index(SemanticTokenModifier::DECLARATION)
|
||||
.unwrap();
|
||||
let definition_index = server
|
||||
.get_semantic_token_modifier_index(SemanticTokenModifier::DEFINITION)
|
||||
.unwrap();
|
||||
|
||||
// Iterate over the tokens and check the token types.
|
||||
let mut found_definition = false;
|
||||
let mut found_parameter = false;
|
||||
let mut found_property = false;
|
||||
let mut found_function_declaration = false;
|
||||
let mut found_variable_declaration = false;
|
||||
let mut found_property_declaration = false;
|
||||
for token in semantic_tokens.data {
|
||||
if token.token_modifiers_bitset == definition_index {
|
||||
found_definition = true;
|
||||
}
|
||||
|
||||
if token.token_type == parameter_index {
|
||||
found_parameter = true;
|
||||
} else if token.token_type == property_index {
|
||||
found_property = true;
|
||||
}
|
||||
|
||||
if token.token_type == function_index && token.token_modifiers_bitset == declaration_index {
|
||||
found_function_declaration = true;
|
||||
}
|
||||
|
||||
if token.token_type == variable_index && token.token_modifiers_bitset == declaration_index {
|
||||
found_variable_declaration = true;
|
||||
}
|
||||
|
||||
if token.token_type == property_index && token.token_modifiers_bitset == declaration_index {
|
||||
found_property_declaration = true;
|
||||
}
|
||||
|
||||
if found_definition
|
||||
&& found_parameter
|
||||
&& found_property
|
||||
&& found_function_declaration
|
||||
&& found_variable_declaration
|
||||
&& found_property_declaration
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !found_definition {
|
||||
panic!("Expected definition token");
|
||||
}
|
||||
|
||||
if !found_parameter {
|
||||
panic!("Expected parameter token");
|
||||
}
|
||||
|
||||
if !found_property {
|
||||
panic!("Expected property token");
|
||||
}
|
||||
|
||||
if !found_function_declaration {
|
||||
panic!("Expected function declaration token");
|
||||
}
|
||||
|
||||
if !found_variable_declaration {
|
||||
panic!("Expected variable declaration token");
|
||||
}
|
||||
|
||||
if !found_property_declaration {
|
||||
panic!("Expected property declaration token");
|
||||
}
|
||||
} else {
|
||||
panic!("Expected semantic tokens");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_kcl_lsp_semantic_tokens_multiple_comments() {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
@ -2998,3 +3154,674 @@ async fn test_kcl_lsp_folding() {
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_with_parse_error_and_ast_unchanged_but_has_diagnostics_reparse() {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
let code = r#"const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
|> ^^^things(3.14, %)"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
|
||||
// Ensure we have one diagnostic.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: code.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
|
||||
// Ensure we have one diagnostic.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_with_lint_and_ast_unchanged_but_has_diagnostics_reparse() {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
|
||||
// Ensure we have one diagnostic.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: code.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
|
||||
// Ensure we have one diagnostic.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_with_lint_and_parse_error_and_ast_unchanged_but_has_diagnostics_reparse() {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
|> ^^^^thing(3.14, %)"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: code.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
|
||||
// Ensure we have one diagnostic.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_lint_and_ast_unchanged_but_has_diagnostics_reexecute() {
|
||||
let server = kcl_lsp_server(true).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %, $seg01)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(ref diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 2);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: code.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 2);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_lint_reexecute_new_lint() {
|
||||
let server = kcl_lsp_server(true).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %, $seg01)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(ref diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 2);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: r#"const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %, $seg01)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)
|
||||
const NEW_LINT = 1"#
|
||||
.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 2);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_lint_reexecute_new_ast_error() {
|
||||
let server = kcl_lsp_server(true).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %, $seg01)
|
||||
|> close(%)
|
||||
|> ^^^extrude(3.14, %)"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(ref diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: r#"const part001 = startSketchOn('XY')
|
||||
|> ^^^^startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %, $seg01)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)
|
||||
const NEW_LINT = 1"#
|
||||
.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_lint_reexecute_had_lint_new_parse_error() {
|
||||
let server = kcl_lsp_server(true).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(ref diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
|
||||
// Get the symbols map.
|
||||
let symbols_map = server.symbols_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(symbols_map != vec![]);
|
||||
|
||||
// Get the semantic tokens map.
|
||||
let semantic_tokens_map = server
|
||||
.semantic_tokens_map
|
||||
.get("file:///test.kcl")
|
||||
.await
|
||||
.unwrap()
|
||||
.clone();
|
||||
assert!(semantic_tokens_map != vec![]);
|
||||
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(memory != ProgramMemory::default());
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: r#"const part001 = startSketchOn('XY')
|
||||
|> ^^^^startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
|> extrude(3.14, %)
|
||||
const NEW_LINT = 1"#
|
||||
.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await;
|
||||
assert!(ast.is_none());
|
||||
|
||||
// Get the symbols map.
|
||||
let symbols_map = server.symbols_map.get("file:///test.kcl").await;
|
||||
assert!(symbols_map.is_none());
|
||||
|
||||
// Get the semantic tokens map.
|
||||
let semantic_tokens_map = server
|
||||
.semantic_tokens_map
|
||||
.get("file:///test.kcl")
|
||||
.await
|
||||
.unwrap()
|
||||
.clone();
|
||||
assert!(semantic_tokens_map != vec![]);
|
||||
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn serial_test_kcl_lsp_code_lint_reexecute_had_lint_new_execution_error() {
|
||||
let server = kcl_lsp_server(true).await.unwrap();
|
||||
|
||||
let code = r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %)
|
||||
|> line([0, 20], %)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
"#;
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(ref diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
|
||||
// Get the token map.
|
||||
let token_map = server.token_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(token_map != vec![]);
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
|
||||
// Get the symbols map.
|
||||
let symbols_map = server.symbols_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(symbols_map != vec![]);
|
||||
|
||||
// Get the semantic tokens map.
|
||||
let semantic_tokens_map = server
|
||||
.semantic_tokens_map
|
||||
.get("file:///test.kcl")
|
||||
.await
|
||||
.unwrap()
|
||||
.clone();
|
||||
assert!(semantic_tokens_map != vec![]);
|
||||
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(memory != ProgramMemory::default());
|
||||
|
||||
// Send a file change, but the code is the same.
|
||||
server
|
||||
.did_change(tower_lsp::lsp_types::DidChangeTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::VersionedTextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
version: 2,
|
||||
},
|
||||
content_changes: vec![tower_lsp::lsp_types::TextDocumentContentChangeEvent {
|
||||
range: None,
|
||||
range_length: None,
|
||||
text: r#"const LINT = 1
|
||||
const part001 = startSketchOn('XY')
|
||||
|> startProfileAt([-10, -10], %)
|
||||
|> line([20, 0], %, $seg01)
|
||||
|> line([0, 20], %, $seg01)
|
||||
|> line([-20, 0], %)
|
||||
|> close(%)
|
||||
"#
|
||||
.to_string(),
|
||||
}],
|
||||
})
|
||||
.await;
|
||||
server.wait_on_handle().await;
|
||||
|
||||
// Get the token map.
|
||||
let token_map = server.token_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(token_map != vec![]);
|
||||
|
||||
// Get the ast.
|
||||
let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(ast != crate::ast::types::Program::default());
|
||||
|
||||
// Get the symbols map.
|
||||
let symbols_map = server.symbols_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
assert!(symbols_map != vec![]);
|
||||
|
||||
// Get the semantic tokens map.
|
||||
let semantic_tokens_map = server
|
||||
.semantic_tokens_map
|
||||
.get("file:///test.kcl")
|
||||
.await
|
||||
.unwrap()
|
||||
.clone();
|
||||
assert!(semantic_tokens_map != vec![]);
|
||||
|
||||
// Get the memory.
|
||||
let memory = server.memory_map.get("file:///test.kcl").await;
|
||||
assert!(memory.is_none());
|
||||
|
||||
// Ensure we have diagnostics.
|
||||
let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 2);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
}
|
||||
|
@ -912,8 +912,8 @@ fn value_allowed_in_pipe_expr(i: TokenSlice) -> PResult<Value> {
|
||||
alt((
|
||||
member_expression.map(Box::new).map(Value::MemberExpression),
|
||||
bool_value.map(Box::new).map(Value::Literal),
|
||||
literal.map(Box::new).map(Value::Literal),
|
||||
tag.map(Box::new).map(Value::TagDeclarator),
|
||||
literal.map(Box::new).map(Value::Literal),
|
||||
fn_call.map(Box::new).map(Value::CallExpression),
|
||||
identifier.map(Box::new).map(Value::Identifier),
|
||||
array.map(Box::new).map(Value::ArrayExpression),
|
||||
@ -1067,6 +1067,19 @@ impl TryFrom<Token> for TagDeclarator {
|
||||
}
|
||||
}
|
||||
|
||||
impl TagDeclarator {
|
||||
fn into_valid_binding_name(self) -> Result<Self, KclError> {
|
||||
// Make sure the tag is not named after a stdlib function.
|
||||
if crate::std::name_in_stdlib(&self.name) {
|
||||
return Err(KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: vec![SourceRange([self.start, self.end])],
|
||||
message: format!("Cannot assign a tag to a reserved keyword: {}", self.name),
|
||||
}));
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
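A sketch of what the new check accepts and rejects (test name and spans are illustrative; `line` is an existing stdlib function):

#[test]
fn tag_names_cannot_shadow_stdlib() {
    // `line` is a stdlib function, so it is rejected as a tag name.
    let bad = TagDeclarator { start: 0, end: 5, name: "line".to_owned() };
    assert!(bad.into_valid_binding_name().is_err());

    // An ordinary name like `seg01` passes through unchanged.
    let ok = TagDeclarator { start: 0, end: 6, name: "seg01".to_owned() };
    assert!(ok.into_valid_binding_name().is_ok());
}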
/// Parse a Kcl tag that starts with a `$`.
|
||||
fn tag(i: TokenSlice) -> PResult<TagDeclarator> {
|
||||
dollar.parse_next(i)?;
|
||||
@ -1468,7 +1481,99 @@ fn binding_name(i: TokenSlice) -> PResult<Identifier> {
|
||||
fn fn_call(i: TokenSlice) -> PResult<CallExpression> {
|
||||
let fn_name = identifier(i)?;
|
||||
let _ = terminated(open_paren, opt(whitespace)).parse_next(i)?;
|
||||
let args = arguments(i)?;
|
||||
let mut args = arguments(i)?;
|
||||
if let Some(std_fn) = crate::std::get_stdlib_fn(&fn_name.name) {
|
||||
// Type check the arguments.
|
||||
for (i, spec_arg) in std_fn.args().iter().enumerate() {
|
||||
let Some(arg) = &args.get(i) else {
|
||||
// The executor checks the number of arguments, so we don't need to check it here.
|
||||
continue;
|
||||
};
|
||||
match spec_arg.type_.as_ref() {
|
||||
"TagDeclarator" => {
|
||||
match &arg {
|
||||
Value::Identifier(_) => {
|
||||
// These are fine since we want someone to be able to map a variable to a tag declarator.
|
||||
}
|
||||
Value::TagDeclarator(tag) => {
|
||||
tag.clone()
|
||||
.into_valid_binding_name()
|
||||
.map_err(|e| ErrMode::Cut(ContextError::from(e)))?;
|
||||
}
|
||||
Value::Literal(literal) => {
|
||||
let LiteralValue::String(name) = &literal.value else {
|
||||
return Err(ErrMode::Cut(
|
||||
KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: vec![SourceRange([arg.start(), arg.end()])],
|
||||
message: format!("Expected a tag declarator like `$name`, found {:?}", literal),
|
||||
})
|
||||
.into(),
|
||||
));
|
||||
};
|
||||
|
||||
// Convert this to a TagDeclarator.
|
||||
let tag = TagDeclarator {
|
||||
start: literal.start,
|
||||
end: literal.end,
|
||||
name: name.to_string(),
|
||||
};
|
||||
let tag = tag
|
||||
.into_valid_binding_name()
|
||||
.map_err(|e| ErrMode::Cut(ContextError::from(e)))?;
|
||||
|
||||
// Replace the literal with the tag.
|
||||
args[i] = Value::TagDeclarator(Box::new(tag));
|
||||
}
|
||||
e => {
|
||||
return Err(ErrMode::Cut(
|
||||
KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: vec![SourceRange([arg.start(), arg.end()])],
|
||||
message: format!("Expected a tag declarator like `$name`, found {:?}", e),
|
||||
})
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
"TagIdentifier" => {
|
||||
match &arg {
|
||||
Value::Identifier(_) => {}
|
||||
Value::Literal(literal) => {
|
||||
let LiteralValue::String(name) = &literal.value else {
|
||||
return Err(ErrMode::Cut(
|
||||
KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: vec![SourceRange([arg.start(), arg.end()])],
|
||||
message: format!("Expected a tag declarator like `$name`, found {:?}", literal),
|
||||
})
|
||||
.into(),
|
||||
));
|
||||
};
|
||||
|
||||
// Convert this to an Identifier.
|
||||
let tag = Identifier {
|
||||
start: literal.start,
|
||||
end: literal.end,
|
||||
name: name.to_string(),
|
||||
};
|
||||
|
||||
// Replace the literal with the tag.
|
||||
args[i] = Value::Identifier(Box::new(tag));
|
||||
}
|
||||
e => {
|
||||
return Err(ErrMode::Cut(
|
||||
KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: vec![SourceRange([arg.start(), arg.end()])],
|
||||
message: format!("Expected a tag identifier like `tagName`, found {:?}", e),
|
||||
})
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
let end = preceded(opt(whitespace), close_paren).parse_next(i)?.end;
|
||||
Ok(CallExpression {
|
||||
start: fn_name.start,
|
||||
|
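End to end, the coercion above means a string literal passed where a stdlib signature expects a tag comes out of the parser as a `TagDeclarator`. A rough test-style sketch (the snippet and assertion are illustrative, relying only on APIs used elsewhere in this diff):

#[test]
fn string_tag_becomes_tag_declarator() {
    let code = r#"const part001 = startSketchOn('XY')
  |> startProfileAt([0, 0], %)
  |> line([0, 20], %, 'seg01')"#;
    let tokens = crate::token::lexer(code).unwrap();
    let program = crate::parser::Parser::new(tokens).ast().unwrap();
    // The 'seg01' literal has been rewritten to a TagDeclarator by fn_call,
    // which is also what the updated snapshots below show.
    let json = serde_json::to_string(&program).unwrap();
    assert!(json.contains("TagDeclarator"));
}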
@ -110,12 +110,11 @@ expression: actual
|
||||
"end": 65
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
"type": "Literal",
|
||||
"type": "TagDeclarator",
|
||||
"type": "TagDeclarator",
|
||||
"start": 67,
|
||||
"end": 75,
|
||||
"value": "myPath",
|
||||
"raw": "'myPath'"
|
||||
"value": "myPath"
|
||||
}
|
||||
],
|
||||
"optional": false
|
||||
@ -208,12 +207,11 @@ expression: actual
|
||||
"end": 133
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
"type": "Literal",
|
||||
"type": "TagDeclarator",
|
||||
"type": "TagDeclarator",
|
||||
"start": 135,
|
||||
"end": 146,
|
||||
"value": "rightPath",
|
||||
"raw": "'rightPath'"
|
||||
"value": "rightPath"
|
||||
}
|
||||
],
|
||||
"optional": false
|
||||
|
@ -408,7 +408,7 @@ impl From<bool> for DefaultTrue {
|
||||
#[derive(
|
||||
Debug, Default, Eq, PartialEq, Copy, Clone, Deserialize, Serialize, JsonSchema, ts_rs::TS, Display, FromStr,
|
||||
)]
|
||||
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
|
||||
#[cfg_attr(feature = "pyo3", pyo3::pyclass(eq, eq_int))]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
#[display(style = "lowercase")]
|
||||
|
@ -124,6 +124,10 @@ pub fn name_in_stdlib(name: &str) -> bool {
|
||||
CORE_FNS.iter().any(|f| f.name() == name)
|
||||
}
|
||||
|
||||
pub fn get_stdlib_fn(name: &str) -> Option<Box<dyn StdLibFn>> {
|
||||
CORE_FNS.iter().find(|f| f.name() == name).cloned()
|
||||
}
|
||||
|
||||
pub struct StdLib {
|
||||
pub fns: HashMap<String, Box<dyn StdLibFn>>,
|
||||
pub kcl_fns: HashMap<String, Box<dyn KclStdLibFn>>,
|
||||
|
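A small usage sketch for the new lookup; the field and method names follow how the parser change consumes it, and the wrapper itself is illustrative:

fn print_arg_types(name: &str) {
    if let Some(std_fn) = crate::std::get_stdlib_fn(name) {
        for spec_arg in std_fn.args().iter() {
            // `type_` is the declared argument type string the parser now
            // checks against (e.g. "TagDeclarator").
            println!("{}: {}", std_fn.name(), spec_arg.type_);
        }
    }
}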
@ -26,7 +26,7 @@ pub async fn circle(args: Args) -> Result<MemoryItem, KclError> {
|
||||
let (center, radius, sketch_surface_or_group, tag): ([f64; 2], f64, SketchSurfaceOrGroup, Option<TagDeclarator>) =
|
||||
args.get_circle_args()?;
|
||||
|
||||
let sketch_group = inner_circle(center, radius, tag, sketch_surface_or_group, args).await?;
|
||||
let sketch_group = inner_circle(center, radius, sketch_surface_or_group, tag, args).await?;
|
||||
Ok(MemoryItem::SketchGroup(sketch_group))
|
||||
}
|
||||
|
||||
@ -55,8 +55,8 @@ pub async fn circle(args: Args) -> Result<MemoryItem, KclError> {
|
||||
async fn inner_circle(
|
||||
center: [f64; 2],
|
||||
radius: f64,
|
||||
tag: Option<TagDeclarator>,
|
||||
sketch_surface_or_group: SketchSurfaceOrGroup,
|
||||
tag: Option<TagDeclarator>,
|
||||
args: Args,
|
||||
) -> Result<Box<SketchGroup>, KclError> {
|
||||
let sketch_surface = match sketch_surface_or_group {
|
||||
|
@ -13,7 +13,7 @@ mod tokeniser;
|
||||
|
||||
/// The types of tokens.
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
|
||||
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
|
||||
#[cfg_attr(feature = "pyo3", pyo3::pyclass(eq, eq_int))]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[display(style = "camelCase")]
|
||||
|
@ -7,7 +7,7 @@ use std::{
|
||||
|
||||
use futures::stream::TryStreamExt;
|
||||
use gloo_utils::format::JsValueSerdeExt;
|
||||
use kcl_lib::{coredump::CoreDump, engine::EngineManager, executor::ExecutorSettings};
|
||||
use kcl_lib::{coredump::CoreDump, engine::EngineManager, executor::ExecutorSettings, lint::checks};
|
||||
use tower_lsp::{LspService, Server};
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
@ -59,6 +59,20 @@ pub async fn execute_wasm(
|
||||
JsValue::from_serde(&memory).map_err(|e| e.to_string())
|
||||
}
|
||||
|
||||
// wasm_bindgen wrapper for lint
|
||||
#[wasm_bindgen]
|
||||
pub async fn kcl_lint(program_str: &str) -> Result<JsValue, String> {
|
||||
console_error_panic_hook::set_once();
|
||||
|
||||
let program: kcl_lib::ast::types::Program = serde_json::from_str(program_str).map_err(|e| e.to_string())?;
|
||||
let mut findings = vec![];
|
||||
for discovered_finding in program.lint(checks::lint_variables).into_iter().flatten() {
|
||||
findings.push(discovered_finding);
|
||||
}
|
||||
|
||||
Ok(JsValue::from_serde(&findings).map_err(|e| e.to_string())?)
|
||||
}
|
||||
|
||||
// wasm_bindgen wrapper for creating default planes
|
||||
#[wasm_bindgen]
|
||||
pub async fn make_default_planes(
|
||||
@ -221,9 +235,6 @@ pub async fn kcl_lsp_run(
let stdlib = kcl_lib::std::StdLib::new();
let stdlib_completions = kcl_lib::lsp::kcl::get_completions_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
let stdlib_signatures = kcl_lib::lsp::kcl::get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
// We can unwrap here because we know the tokeniser is valid, since
// we have a test for it.
let token_types = kcl_lib::token::TokenType::all_semantic_token_types().unwrap();

let mut zoo_client = kittycad::Client::new(token);
zoo_client.set_base_url(baseurl.as_str());

@ -273,7 +284,6 @@ pub async fn kcl_lsp_run(
workspace_folders: Default::default(),
stdlib_completions,
stdlib_signatures,
token_types,
token_map: Default::default(),
ast_map: Default::default(),
memory_map: Default::default(),

149
src/wasm-lib/tests/executor/inputs/global-tags.kcl
Normal file
@ -0,0 +1,149 @@
// A mounting bracket for the Focusrite Scarlett Solo audio interface
// This is a bracket that holds an audio device underneath a desk or shelf. The audio device has dimensions of 144mm wide, 80mm length and 45mm depth with fillets of 6mm. This mounting bracket is designed to be 3D printed with PLA material

// define constants in mm
const radius = 6.0
const width = 144.0
const length = 80.0
const depth = 45.0
const thk = 4
const holeDiam = 5
const tabLength = 25
const tabWidth = 12
const tabThk = 4

// define a rectangular shape func
fn rectShape = (pos, w, l) => {
const rr = startSketchOn('xy')
|> startProfileAt([pos[0] - (w / 2), pos[1] - (l / 2)], %)
|> lineTo([pos[0] + w / 2, pos[1] - (l / 2)], %, "edge01")
|> lineTo([pos[0] + w / 2, pos[1] + l / 2], %, "edge02")
|> lineTo([pos[0] - (w / 2), pos[1] + l / 2], %, "edge03")
|> close(%, "edge04")
return rr
}

// define the bracket plane
const bracketPlane = {
plane: {
origin: { x: 0, y: length / 2 + thk, z: 0 },
x_axis: { x: 1, y: 0, z: 0 },
y_axis: { x: 0, y: 0, z: 1 },
z_axis: { x: 0, y: -1, z: 0 }
}
}

// build the bracket sketch around the body
fn bracketSketch = (w, d, t) => {
const s = startSketchOn(bracketPlane)
|> startProfileAt([-w / 2 - t, d + t], %)
|> lineTo([-w / 2 - t, -t], %, "edge1")
|> lineTo([w / 2 + t, -t], %, "edge2")
|> lineTo([w / 2 + t, d + t], %, "edge3")
|> lineTo([w / 2, d + t], %, "edge4")
|> lineTo([w / 2, 0], %, "edge5")
|> lineTo([-w / 2, 0], %, "edge6")
|> lineTo([-w / 2, d + t], %, "edge7")
|> close(%, "edge8")
return s
}

// build the body of the bracket
const bracketBody = bracketSketch(width, depth, thk)
|> extrude(length + 2 * thk, %)
|> fillet({
radius: radius,
tags: [
getNextAdjacentEdge("edge7", %),
getNextAdjacentEdge("edge2", %),
getNextAdjacentEdge("edge3", %),
getNextAdjacentEdge("edge6", %)
]
}, %)

// define the tab plane
const tabPlane = {
plane: {
origin: { x: 0, y: 0, z: depth + thk },
x_axis: { x: 1, y: 0, z: 0 },
y_axis: { x: 0, y: 1, z: 0 },
z_axis: { x: 0, y: 0, z: 1 }
}
}

// build the tabs of the mounting bracket (right side)
const tabsR = startSketchOn(tabPlane)
|> startProfileAt([width / 2 + thk, length / 2 + thk], %)
|> line([tabWidth, -tabLength / 3], %, "edge11")
|> line([0, -tabLength / 3 * 2], %, "edge12")
|> line([-tabWidth, -tabLength / 3], %, "edge13")
|> close(%, "edge14")
|> hole(circle([
width / 2 + thk + tabWidth / 2,
length / 2 + thk - (tabLength / (3 / 2))
], holeDiam / 2, %), %)
|> extrude(-tabThk, %)
|> fillet({
radius: holeDiam / 2,
tags: [
getNextAdjacentEdge("edge12", %),
getNextAdjacentEdge("edge13", %)
]
}, %)
|> patternLinear3d({
axis: [0, -1, 0],
repetitions: 1,
distance: length + 2 * thk - (tabLength * 4 / 3)
}, %)

// build the tabs of the mounting bracket (left side)
const tabsL = startSketchOn(tabPlane)
|> startProfileAt([-width / 2 - thk, length / 2 + thk], %)
|> line([-tabWidth, -tabLength / 3], %, "edge21")
|> line([0, -tabLength / 3 * 2], %, "edge22")
|> line([tabWidth, -tabLength / 3], %, "edge23")
|> close(%, "edge24")
|> hole(circle([
-width / 2 - thk - (tabWidth / 2),
length / 2 + thk - (tabLength / (3 / 2))
], holeDiam / 2, %), %)
|> extrude(-tabThk, %)
|> fillet({
radius: holeDiam / 2,
tags: [
getNextAdjacentEdge("edge21", %),
getNextAdjacentEdge("edge22", %)
]
}, %)
|> patternLinear3d({
axis: [0, -1, 0],
repetitions: 1,
distance: length + 2 * thk - (tabLength * 4 / 3)
}, %)

// define a plane for retention bumps
const retPlane = {
plane: {
origin: { x: -width / 2 + 20, y: 0, z: 0 },
x_axis: { x: 0, y: 1, z: 0 },
y_axis: { x: 0, y: 0, z: 1 },
z_axis: { x: 1, y: 0, z: 0 }
}
}

// build the retention bump in the front
const retFront = startSketchOn(retPlane)
|> startProfileAt([-length / 2 - thk, 0], %)
|> line([0, thk], %)
|> line([thk, -thk], %)
|> close(%)
|> extrude(width - 40, %)

// build the retention bump in the back
const retBack = startSketchOn(retPlane)
|> startProfileAt([length / 2 + thk, 0], %)
|> line([0, thk], %)
|> line([-thk, 0], %)
|> line([0, -thk], %)
|> close(%)
|> extrude(width - 40, %)

@ -1,41 +1,37 @@
fn triangle = (len) => {
return startSketchOn('XY')
|> startProfileAt([0, 0], %)
|> angledLine({angle: 60, length: len}, %, 'a')
|> angledLine({angle: 180, length: len}, %, 'b')
|> angledLine({angle: 300, length: len}, %, 'c')
}

let triangleHeight = 200
let plumbusLen = 100
let radius = 80
let circ = {angle_start: 0, angle_end: 360, radius: radius}

let triangleLen = 500
const p = triangle(triangleLen)
const p = startSketchOn('XY')
|> startProfileAt([0, 0], %)
|> angledLine({angle: 60, length:triangleLen}, %, $a)
|> angledLine({angle: 180, length: triangleLen}, %, $b)
|> angledLine({angle: 300, length: triangleLen}, %, $c)
|> extrude(triangleHeight, %)

fn circl = (x, tag) => {
return startSketchOn(p, tag)
fn circl = (x, face, tag) => {
return startSketchOn(p, face)
|> startProfileAt([x + radius, triangleHeight/2], %)
|> arc(circ, %, 'arc-' + tag)
|> arc(circ, %, tag)
|> close(%)
}

const plumbus1 =
circl(-200, 'c')
circl(-200,c, $arc_c)
|> extrude(plumbusLen, %)
|> fillet({
radius: 5,
tags: ['arc-c', getOppositeEdge('arc-c', %)]
tags: [arc_c, getOppositeEdge(arc_c, %)]
}, %)

const plumbus0 =
circl(200, 'a')
circl(200, a, $arc_a)
|> extrude(plumbusLen, %)
|> fillet({
radius: 5,
tags: ['arc-a', getOppositeEdge('arc-a', %)]
tags: [arc_a, getOppositeEdge(arc_a, %)]
}, %)

@ -1731,58 +1731,58 @@ const part002 = startSketchOn(part001, 'end')

#[tokio::test(flavor = "multi_thread")]
async fn serial_test_plumbus_fillets() {
let code = r#"fn make_circle = (face, tag, pos, radius) => {
const sg = startSketchOn(face, tag)
let code = r#"fn make_circle = (ext, face, tag ,pos, radius) => {
const sg = startSketchOn(ext, face)
|> startProfileAt([pos[0] + radius, pos[1]], %)
|> arc({
angle_end: 360,
angle_start: 0,
radius: radius
}, %, 'arc-' + tag)
}, %, tag)
|> close(%)

return sg
}

fn pentagon = (len) => {
fn pentagon = (len, taga, tagb, tagc) => {
const sg = startSketchOn('XY')
|> startProfileAt([-len / 2, -len / 2], %)
|> angledLine({ angle: 0, length: len }, %, 'a')
|> angledLine({ angle: 0, length: len }, %,taga)
|> angledLine({
angle: segAng('a', %) + 180 - 108,
angle: segAng(a, %) + 180 - 108,
length: len
}, %, 'b')
}, %, tagb)
|> angledLine({
angle: segAng('b', %) + 180 - 108,
angle: segAng(b, %) + 180 - 108,
length: len
}, %, 'c')
}, %,tagc)
|> angledLine({
angle: segAng('c', %) + 180 - 108,
angle: segAng(c, %) + 180 - 108,
length: len
}, %, 'd')
}, %, $d)
|> angledLine({
angle: segAng('d', %) + 180 - 108,
angle: segAng(d, %) + 180 - 108,
length: len
}, %)

return sg
}

const p = pentagon(32)
const p = pentagon(32, $a, $b, $c)
|> extrude(10, %)

const plumbus0 = make_circle(p, 'a', [0, 0], 2.5)
const plumbus0 = make_circle(p,a, $arc_a, [0, 0], 2.5)
|> extrude(10, %)
|> fillet({
radius: 0.5,
tags: ['arc-a', getOppositeEdge('arc-a', %)]
tags: [arc_a, getOppositeEdge(arc_a, %)]
}, %)

const plumbus1 = make_circle(p, 'b', [0, 0], 2.5)
const plumbus1 = make_circle(p, b,$arc_b, [0, 0], 2.5)
|> extrude(10, %)
|> fillet({
radius: 0.5,
tags: ['arc-b', getOppositeEdge('arc-b', %)]
tags: [arc_b, getOppositeEdge(arc_b, %)]
}, %)
"#;

@ -2457,3 +2457,10 @@ let p = triangle(200)
r#"value already defined: KclErrorDetails { source_ranges: [SourceRange([317, 319]), SourceRange([332, 345])], message: "Cannot redefine `a`" }"#
);
}

#[tokio::test(flavor = "multi_thread")]
async fn serial_test_global_tags() {
let code = include_str!("inputs/global-tags.kcl");
let result = execute_and_snapshot(code, UnitLength::Mm).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/global_tags.png", &result, 0.999);
}
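
The KCL-facing pattern these test inputs exercise: a tag is declared with a $ prefix at the call that creates the path segment, then referenced afterwards as a bare identifier (as the face argument to startSketchOn and inside fillet/getOppositeEdge), instead of the earlier 'a' / 'arc-a' string literals. A minimal sketch of the new style, condensed from the test files above; the names and dimensions are taken from those files and are illustrative only:

// declare tags with $ where each path segment is created
const p = startSketchOn('XY')
|> startProfileAt([0, 0], %)
|> angledLine({ angle: 60, length: 500 }, %, $a)
|> angledLine({ angle: 180, length: 500 }, %, $b)
|> angledLine({ angle: 300, length: 500 }, %, $c)
|> extrude(200, %)

// reference a tag as a bare identifier: as a face to sketch on,
// and in edge queries (previously the strings 'a' and 'arc-a')
const plumbus = startSketchOn(p, a)
|> startProfileAt([280, 100], %)
|> arc({ angle_start: 0, angle_end: 360, radius: 80 }, %, $arc_a)
|> close(%)
|> extrude(100, %)
|> fillet({
radius: 5,
tags: [arc_a, getOppositeEdge(arc_a, %)]
}, %)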