Compare commits

..

14 Commits

Author SHA1 Message Date
f46edcddf3 remove the copies everywhere
Signed-off-by: Jess Frazelle <github@jessfraz.com>
2024-06-29 16:31:01 -07:00
68fd921a64 playw tweaks (#2845) 2024-06-30 06:10:54 +10:00
a20e710e8f playw tweaks (#2843)
unused
2024-06-29 11:53:47 -07:00
9daf2d7794 make delete key work for solids (#2752)
* failing test

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* failing test

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* push up progress

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* improve traversal

* basic deleteFromSelection

* remove .only

* delete depended on extrude

* fix

* fix selection override

* add selection test

* Revert "add selection test"

This reverts commit 40a414b612.

* Revert "fix selection override"

This reverts commit 68e66e2980.

* more progress

* add toast message when we're not able to delete

* add e2e tests

* tweak test timeout

* more test tweaks

* fix backspace cmd bar conflict

* clean up

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
Co-authored-by: Kurt Hutten Irev-Dev <k.hutten@protonmail.ch>
2024-06-30 03:36:04 +10:00
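The delete-key wiring from the "make delete key work for solids" commit above shows up later in the ModelingMachineProvider diff; here is a minimal standalone sketch of that binding (the wrapper function name is illustrative, the hook call mirrors the diff):

// Sketch of binding Backspace/Delete to a state-machine event, mirroring the
// useHotkeys call added in the ModelingMachineProvider diff further down.
// useDeleteSelectionHotkey is an illustrative name, not part of the repo.
import { useHotkeys } from 'react-hotkeys-hook'

export function useDeleteSelectionHotkey(
  send: (event: { type: 'Delete selection' }) => void
) {
  useHotkeys(['backspace', 'delete', 'del'], () => {
    // The modeling machine's guard decides whether the current selection is
    // deletable; when the AST modification can't handle it, the app shows an
    // "Unable to delete part" toast instead (see the e2e test below).
    send({ type: 'Delete selection' })
  })
}
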
f86473d13b Call core dump from the bug reporting button(s) (#2783)
* Add core dump to the refresh button - refreshing now triggers a core dump before reloading.
* Add a lower-right bug report button - includes custom toasts for bug reporting, and falls back to the default reporting flow when the app cannot generate a core dump.
2024-06-28 18:06:40 -07:00
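A minimal sketch of the bug-report fallback described in the commit above, assuming react-hot-toast and some coreDump() helper (reportBug and ISSUE_URL are illustrative names; the real wiring is in the LowerRightControls and RefreshButton diffs below):

// Illustrative sketch only; names here are not the repo's actual identifiers.
import toast from 'react-hot-toast'

const ISSUE_URL = 'https://github.com/KittyCAD/modeling-app/issues/new/choose'

export async function reportBug(coreDump?: () => Promise<unknown>) {
  if (!coreDump) {
    // No core dump manager available: fall back to the plain issue form.
    window.open(ISSUE_URL, '_blank')
    return
  }
  try {
    // Show progress while the core dump is generated and uploaded.
    await toast.promise(coreDump(), {
      loading: 'Preparing bug report...',
      success: 'Bug report opened in new window',
      error: 'Unable to export a core dump. Using default reporting.',
    })
  } catch {
    // Core dump failed: still let the user file a bug manually.
    window.open(ISSUE_URL, '_blank')
  }
}
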
6fccc68c18 make release builds in prod (#2839)
Update package.json
2024-06-28 12:36:02 -07:00
ade66d0876 Bump ts-rs from 9.0.0 to 9.0.1 in /src/wasm-lib (#2837)
Bumps [ts-rs](https://github.com/Aleph-Alpha/ts-rs) from 9.0.0 to 9.0.1.
- [Release notes](https://github.com/Aleph-Alpha/ts-rs/releases)
- [Changelog](https://github.com/Aleph-Alpha/ts-rs/blob/main/CHANGELOG.md)
- [Commits](https://github.com/Aleph-Alpha/ts-rs/commits)

---
updated-dependencies:
- dependency-name: ts-rs
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-06-27 23:34:32 -07:00
b5f3a067ee Selections bug (#2836)
* fix selection override

* add selection test

* fix playwright tests
2024-06-28 14:40:59 +10:00
bb9d24f821 Transformable patterns (#2824) 2024-06-27 22:20:51 -05:00
bd3cd97d74 move back to using dashmap and cleanup heaps of code (#2834)
* more

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixups

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* everything pre mutex locks

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* remove clones

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* another clone

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* progress

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* more fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* cleanup

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* test-utils

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* all features

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* better naming

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
2024-06-27 15:43:49 -07:00
1b5839a7f8 More semantic tokens modifiers (#2823)
* more semantic tokens

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* remove closed

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* nuke more

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix wasm

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
2024-06-26 14:51:47 -07:00
a9e480f0ed Move walk handlers out of lint (#2822)
I want to make it more useful and generally applicable. I think in the
future we'll need a &mut variant, or an in-place tree replacer.
2024-06-26 16:32:30 -04:00
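The walk handlers mentioned above mirror the traverse(ast, { enter, leave }) helper used on the TypeScript side (see the deleteFromSelection diff below). A simplified, illustrative read-only walker follows; the node shape is a sketch and does not match the repo's real AST types:

// Minimal enter/leave tree walker over objects that carry a `type` field.
// This is a sketch of the pattern only, not the repo's walk implementation.
type Node = { type: string; [key: string]: unknown }
type Path = (string | number)[]
type Visitor = {
  enter?: (node: Node, path: Path) => void
  leave?: (node: Node, path: Path) => void
}

function isNode(value: unknown): value is Node {
  return typeof value === 'object' && value !== null && 'type' in value
}

export function walk(node: Node, visitor: Visitor, path: Path = []): void {
  visitor.enter?.(node, path)
  for (const [key, value] of Object.entries(node)) {
    if (Array.isArray(value)) {
      value.forEach((child, i) => {
        if (isNode(child)) walk(child, visitor, [...path, key, i])
      })
    } else if (isNode(value)) {
      walk(value, visitor, [...path, key])
    }
  }
  visitor.leave?.(node, path)
}

// Example: collect every CallExpression node in a program.
// walk(ast, { enter: (n) => { if (n.type === 'CallExpression') calls.push(n) } })
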
63fa04608c update onboarding KCL (#2820) 2024-06-26 13:09:53 -07:00
0d4d7fa751 Only show one error at once (#2801)
* Do not show more than one error toast at a time

* use sha as file upload id

* again

* again

* again

* again

* fmt

* Hopefully fix flakiness

* move to macos-14-large

---------

Co-authored-by: Paul Tagliamonte <paul@zoo.dev>
Co-authored-by: Paul R. Tagliamonte <paul@kittycad.io>
2024-06-26 11:04:23 -07:00
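One plausible way to enforce the "only one error toast at a time" behavior with react-hot-toast (the fixed toast id here is an assumption, not necessarily how this PR implements it):

// Hypothetical sketch: reusing a fixed toast id makes react-hot-toast update
// the existing error toast in place instead of stacking a new one.
import toast from 'react-hot-toast'

const ERROR_TOAST_ID = 'kcl-error' // illustrative id, not from the repo

export function showError(message: string) {
  toast.error(message, { id: ERROR_TOAST_ID })
}
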
89 changed files with 7060 additions and 1681 deletions

View File

@ -38,5 +38,7 @@ jobs:
- name: Benchmark kcl library
shell: bash
run: |-
cd src/wasm-lib/kcl; cargo bench -- iai
cd src/wasm-lib/kcl; cargo bench --all-features -- iai
env:
KITTYCAD_API_TOKEN: ${{secrets.KITTYCAD_API_TOKEN}}

View File

@ -38,6 +38,8 @@ jobs:
runs-on: ubuntu-latest-8-cores
needs: check-rust-changes
steps:
- name: Tune GitHub-hosted runner network
uses: smorimoto/tune-github-hosted-runner-network@v1
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
@ -90,14 +92,17 @@ jobs:
- name: build web
run: yarn build:local
- name: Run ubuntu/chrome snapshots
continue-on-error: true
run: |
yarn playwright test --project="Google Chrome" --update-snapshots e2e/playwright/snapshot-tests.spec.ts
# remove test-results, messes with retry logic
rm -r test-results
env:
CI: true
token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
snapshottoken: ${{ secrets.KITTYCAD_API_TOKEN }}
- name: Clean up test-results
if: always()
continue-on-error: true
run: rm -r test-results
- name: check for changes
id: git-check
run: |
@ -124,7 +129,7 @@ jobs:
- uses: actions/upload-artifact@v4
if: steps.git-check.outputs.modified == 'true'
with:
name: playwright-report-ubuntu
name: playwright-report-ubuntu-${{ github.sha }}
path: playwright-report/
retention-days: 30
# if have previous run results, use them
@ -132,7 +137,7 @@ jobs:
if: always()
continue-on-error: true
with:
name: test-results-ubuntu
name: test-results-ubuntu-${{ github.sha }}
path: test-results/
- name: Run ubuntu/chrome flow retry failures
id: retry
@ -158,23 +163,25 @@ jobs:
- uses: actions/upload-artifact@v4
if: always()
with:
name: test-results-ubuntu
name: test-results-ubuntu-${{ github.sha }}
path: test-results/
retention-days: 30
overwrite: true
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-ubuntu
name: playwright-report-ubuntu-${{ github.sha }}
path: playwright-report/
retention-days: 30
overwrite: true
playwright-macos:
timeout-minutes: 60
runs-on: macos-14
runs-on: macos-14-large
needs: check-rust-changes
steps:
- name: Tune GitHub-hosted runner network
uses: smorimoto/tune-github-hosted-runner-network@v1
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
@ -232,7 +239,7 @@ jobs:
if: ${{ always() }}
continue-on-error: true
with:
name: test-results-macos
name: test-results-macos-${{ github.sha }}
path: test-results/
- name: Run macos/safari flow retry failures
id: retry
@ -260,14 +267,14 @@ jobs:
- uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: test-results-macos
name: test-results-macos-${{ github.sha }}
path: test-results/
retention-days: 30
overwrite: true
- uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: playwright-report-macos
name: playwright-report-macos-${{ github.sha }}
path: playwright-report/
retention-days: 30
overwrite: true

View File

@ -55,6 +55,7 @@ layout: manual
* [`patternCircular3d`](kcl/patternCircular3d)
* [`patternLinear2d`](kcl/patternLinear2d)
* [`patternLinear3d`](kcl/patternLinear3d)
* [`patternTransform`](kcl/patternTransform)
* [`pi`](kcl/pi)
* [`pow`](kcl/pow)
* [`profileStart`](kcl/profileStart)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -1214,12 +1214,18 @@ test('Auto complete works', async ({ page }) => {
await page.waitForTimeout(100)
// press arrow down twice then enter to accept xLine
await page.keyboard.press('ArrowDown')
await page.waitForTimeout(100)
await page.keyboard.press('ArrowDown')
await page.waitForTimeout(100)
await page.keyboard.press('Enter')
await page.waitForTimeout(100)
// finish line with comment
await page.keyboard.type('5')
await page.waitForTimeout(100)
await page.keyboard.press('Tab')
await page.waitForTimeout(100)
await page.keyboard.press('Tab')
await page.waitForTimeout(100)
await page.keyboard.type(' // lin')
await page.waitForTimeout(100)
// there shouldn't be any auto complete options for 'lin' in the comment
@ -1689,6 +1695,7 @@ test.describe('Onboarding tests', () => {
})
test.describe('Testing selections', () => {
test.setTimeout(90_000)
test('Selections work on fresh and edited sketch', async ({ page }) => {
// tests mapping works on fresh sketch and edited sketch
// tests using hovers which is the same as selections, because if
@ -1894,6 +1901,239 @@ test.describe('Testing selections', () => {
await selectionSequence()
})
test('Solids should be selectable and deletable', async ({ page }) => {
test.setTimeout(90_000)
const u = await getUtils(page)
await page.addInitScript(async () => {
localStorage.setItem(
'persistCode',
`const sketch001 = startSketchOn('XZ')
|> startProfileAt([-79.26, 95.04], %)
|> line([112.54, 127.64], %, $seg02)
|> line([170.36, -121.61], %, $seg01)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude001 = extrude(50, sketch001)
const sketch005 = startSketchOn(extrude001, 'END')
|> startProfileAt([23.24, 136.52], %)
|> line([-8.44, 36.61], %)
|> line([49.4, 2.05], %)
|> line([29.69, -46.95], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const sketch003 = startSketchOn(extrude001, seg01)
|> startProfileAt([21.23, 17.81], %)
|> line([51.97, 21.32], %)
|> line([4.07, -22.75], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const sketch002 = startSketchOn(extrude001, seg02)
|> startProfileAt([-100.54, 16.99], %)
|> line([0, 20.03], %)
|> line([62.61, 0], %, $seg03)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude002 = extrude(50, sketch002)
const sketch004 = startSketchOn(extrude002, seg03)
|> startProfileAt([57.07, 134.77], %)
|> line([-4.72, 22.84], %)
|> line([28.8, 6.71], %)
|> line([9.19, -25.33], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude003 = extrude(20, sketch004)
const pipeLength = 40
const pipeSmallDia = 10
const pipeLargeDia = 20
const thickness = 0.5
const part009 = startSketchOn('XY')
|> startProfileAt([pipeLargeDia - (thickness / 2), 38], %)
|> line([thickness, 0], %)
|> line([0, -1], %)
|> angledLineToX({
angle: 60,
to: pipeSmallDia + thickness
}, %)
|> line([0, -pipeLength], %)
|> angledLineToX({
angle: -60,
to: pipeLargeDia + thickness
}, %)
|> line([0, -1], %)
|> line([-thickness, 0], %)
|> line([0, 1], %)
|> angledLineToX({ angle: 120, to: pipeSmallDia }, %)
|> line([0, pipeLength], %)
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|> close(%)
const rev = revolve({ axis: 'y' }, part009)
`
)
}, KCL_DEFAULT_LENGTH)
await page.setViewportSize({ width: 1000, height: 500 })
await page.goto('/')
await u.waitForAuthSkipAppStart()
await u.openDebugPanel()
await u.expectCmdLog('[data-message-type="execution-done"]')
await u.closeDebugPanel()
await u.openAndClearDebugPanel()
await u.sendCustomCmd({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_look_at',
vantage: { x: 1139.49, y: -7053, z: 8597.31 },
center: { x: -2206.68, y: -1298.36, z: 60 },
up: { x: 0, y: 0, z: 1 },
},
})
await page.waitForTimeout(100)
await u.sendCustomCmd({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_get_settings',
},
})
await page.waitForTimeout(100)
const revolve = { x: 646, y: 248 }
const parentExtrude = { x: 915, y: 133 }
const solid2d = { x: 770, y: 167 }
// DELETE REVOLVE
await page.mouse.click(revolve.x, revolve.y)
await page.waitForTimeout(100)
await expect(page.locator('.cm-activeLine')).toHaveText(
'|> line([0, -pipeLength], %)'
)
await u.clearCommandLogs()
await page.keyboard.press('Backspace')
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
await page.waitForTimeout(200)
await expect(u.codeLocator).not.toContainText(
`const rev = revolve({ axis: 'y' }, part009)`
)
// DELETE PARENT EXTRUDE
await page.mouse.click(parentExtrude.x, parentExtrude.y)
await page.waitForTimeout(100)
await expect(page.locator('.cm-activeLine')).toHaveText(
'|> line([170.36, -121.61], %, $seg01)'
)
await u.clearCommandLogs()
await page.keyboard.press('Backspace')
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
await page.waitForTimeout(200)
await expect(u.codeLocator).not.toContainText(
`const extrude001 = extrude(50, sketch001)`
)
await expect(u.codeLocator).toContainText(`const sketch005 = startSketchOn({
plane: {
origin: { x: 0, y: -50, z: 0 },
x_axis: { x: 1, y: 0, z: 0 },
y_axis: { x: 0, y: 0, z: 1 },
z_axis: { x: 0, y: -1, z: 0 }
}
})`)
await expect(u.codeLocator).toContainText(`const sketch003 = startSketchOn({
plane: {
origin: { x: 116.53, y: 0, z: 163.25 },
x_axis: { x: -0.81, y: 0, z: 0.58 },
y_axis: { x: 0, y: -1, z: 0 },
z_axis: { x: 0.58, y: 0, z: 0.81 }
}
})`)
await expect(u.codeLocator).toContainText(`const sketch002 = startSketchOn({
plane: {
origin: { x: -91.74, y: 0, z: 80.89 },
x_axis: { x: -0.66, y: 0, z: -0.75 },
y_axis: { x: 0, y: -1, z: 0 },
z_axis: { x: -0.75, y: 0, z: 0.66 }
}
})`)
// DELETE SOLID 2D
await page.mouse.click(solid2d.x, solid2d.y)
await page.waitForTimeout(100)
await expect(page.locator('.cm-activeLine')).toHaveText(
'|> startProfileAt([23.24, 136.52], %)'
)
await u.clearCommandLogs()
await page.keyboard.press('Backspace')
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
await page.waitForTimeout(200)
await expect(u.codeLocator).not.toContainText(
`const sketch005 = startSketchOn({`
)
})
test("Deleting solid that the AST mod can't handle results in a toast message", async ({
page,
}) => {
const u = await getUtils(page)
await page.addInitScript(async () => {
localStorage.setItem(
'persistCode',
`const sketch001 = startSketchOn('XZ')
|> startProfileAt([-79.26, 95.04], %)
|> line([112.54, 127.64], %, $seg02)
|> line([170.36, -121.61], %, $seg01)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude001 = extrude(50, sketch001)
const launderExtrudeThroughVar = extrude001
const sketch002 = startSketchOn(launderExtrudeThroughVar, seg02)
|> startProfileAt([-100.54, 16.99], %)
|> line([0, 20.03], %)
|> line([62.61, 0], %, $seg03)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
`
)
}, KCL_DEFAULT_LENGTH)
await page.setViewportSize({ width: 1000, height: 500 })
await page.goto('/')
await u.waitForAuthSkipAppStart()
await u.openDebugPanel()
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
await u.closeDebugPanel()
await u.openAndClearDebugPanel()
await u.sendCustomCmd({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_look_at',
vantage: { x: 1139.49, y: -7053, z: 8597.31 },
center: { x: -2206.68, y: -1298.36, z: 60 },
up: { x: 0, y: 0, z: 1 },
},
})
await page.waitForTimeout(100)
await u.sendCustomCmd({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_get_settings',
},
})
await page.waitForTimeout(100)
// attempt delete
await page.mouse.click(930, 139)
await page.waitForTimeout(100)
await expect(page.locator('.cm-activeLine')).toHaveText(
'|> line([170.36, -121.61], %, $seg01)'
)
await u.clearCommandLogs()
await page.keyboard.press('Backspace')
await expect(page.getByText('Unable to delete part')).toBeVisible()
})
test('Hovering over 3d features highlights code', async ({ page }) => {
const u = await getUtils(page)
await page.addInitScript(async (KCL_DEFAULT_LENGTH) => {
@ -2121,6 +2361,104 @@ const part001 = startSketchOn('XZ')
)
}
})
test("Hovering and selection of extruded faces works, and is not overridden shortly after user's click", async ({
page,
}) => {
await page.addInitScript(async () => {
localStorage.setItem(
'persistCode',
`const sketch001 = startSketchOn('XZ')
|> startProfileAt([-79.26, 95.04], %)
|> line([112.54, 127.64], %)
|> line([170.36, -121.61], %, $seg01)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude001 = extrude(50, sketch001)
`
)
})
const u = await getUtils(page)
await page.setViewportSize({ width: 1200, height: 500 })
await page.goto('/')
await u.waitForAuthSkipAppStart()
await u.openAndClearDebugPanel()
await u.sendCustomCmd({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_look_at',
vantage: { x: 6615, y: -9505, z: 10344 },
center: { x: 1579, y: -635, z: 4035 },
up: { x: 0, y: 0, z: 1 },
},
})
await u.waitForCmdReceive('default_camera_look_at')
await u.clearAndCloseDebugPanel()
await page.waitForTimeout(1000)
const isMac = process.platform === 'darwin'
let noHoverColor: [number, number, number] = [82, 82, 82]
let hoverColor: [number, number, number] = [116, 116, 116]
let selectColor: [number, number, number] = [144, 148, 97]
const extrudeWall = { x: 670, y: 275 }
const extrudeText = `line([170.36, -121.61], %, $seg01)`
const cap = { x: 594, y: 283 }
const capText = `startProfileAt([-79.26, 95.04], %)`
const nothing = { x: 946, y: 229 }
expect(await u.getGreatestPixDiff(extrudeWall, noHoverColor)).toBeLessThan(
5
)
await page.mouse.move(nothing.x, nothing.y)
await page.waitForTimeout(100)
await page.mouse.move(extrudeWall.x, extrudeWall.y)
await expect(page.getByTestId('hover-highlight')).toBeVisible()
await expect(page.getByTestId('hover-highlight')).toContainText(extrudeText)
await page.waitForTimeout(200)
await expect(
await u.getGreatestPixDiff(extrudeWall, hoverColor)
).toBeLessThan(5)
await page.mouse.click(extrudeWall.x, extrudeWall.y)
await expect(page.locator('.cm-activeLine')).toHaveText(`|> ${extrudeText}`)
await page.waitForTimeout(200)
await expect(
await u.getGreatestPixDiff(extrudeWall, selectColor)
).toBeLessThan(5)
await page.waitForTimeout(1000)
// check color stays there, i.e. not overridden (this was a bug previously)
await expect(
await u.getGreatestPixDiff(extrudeWall, selectColor)
).toBeLessThan(5)
await page.mouse.move(nothing.x, nothing.y)
await page.waitForTimeout(300)
await expect(page.getByTestId('hover-highlight')).not.toBeVisible()
// because of shading, color is not exact everywhere on the face
noHoverColor = [104, 104, 104]
hoverColor = [134, 134, 134]
selectColor = [158, 162, 110]
await expect(await u.getGreatestPixDiff(cap, noHoverColor)).toBeLessThan(5)
await page.mouse.move(cap.x, cap.y)
await expect(page.getByTestId('hover-highlight')).toBeVisible()
await expect(page.getByTestId('hover-highlight')).toContainText(capText)
await page.waitForTimeout(200)
await expect(await u.getGreatestPixDiff(cap, hoverColor)).toBeLessThan(5)
await page.mouse.click(cap.x, cap.y)
await expect(page.locator('.cm-activeLine')).toHaveText(`|> ${capText}`)
await page.waitForTimeout(200)
await expect(await u.getGreatestPixDiff(cap, selectColor)).toBeLessThan(5)
await page.waitForTimeout(1000)
// check color stays there, i.e. not overridden (this was a bug previously)
await expect(await u.getGreatestPixDiff(cap, selectColor)).toBeLessThan(5)
})
})
test.describe('Command bar tests', () => {
@ -2139,10 +2477,10 @@ test.describe('Command bar tests', () => {
.or(page.getByRole('button', { name: '⌘K' }))
.click()
let cmdSearchBar = await page.getByPlaceholder('Search commands')
let cmdSearchBar = page.getByPlaceholder('Search commands')
await expect(cmdSearchBar).toBeVisible()
await page.keyboard.press('Escape')
cmdSearchBar = await page.getByPlaceholder('Search commands')
cmdSearchBar = page.getByPlaceholder('Search commands')
await expect(cmdSearchBar).not.toBeVisible()
// Now try the same, but with the keyboard shortcut, check focus
@ -2151,7 +2489,7 @@ test.describe('Command bar tests', () => {
} else {
await page.locator('html').press('Control+C')
}
cmdSearchBar = await page.getByPlaceholder('Search commands')
cmdSearchBar = page.getByPlaceholder('Search commands')
await expect(cmdSearchBar).toBeVisible()
await expect(cmdSearchBar).toBeFocused()
@ -2532,9 +2870,6 @@ fn yohey = (pos) => {
await page.getByText(selectionsSnippets.extrudeAndEditBlocked).click()
await expect(page.getByRole('button', { name: 'Extrude' })).toBeDisabled()
await expect(
page.getByRole('button', { name: 'Edit Sketch' })
).not.toBeVisible()
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
await expect(page.getByRole('button', { name: 'Extrude' })).not.toBeDisabled()
@ -2559,10 +2894,14 @@ fn yohey = (pos) => {
// selecting an editable sketch but clicking "start sketch" should start a new sketch and not edit the existing one
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
await page.getByRole('button', { name: 'Start Sketch' }).click()
await page.waitForTimeout(200)
await page.getByTestId('KCL Code').click()
await page.waitForTimeout(200)
await page.mouse.click(734, 134)
await page.waitForTimeout(100)
await page.getByTestId('KCL Code').click()
// expect main content to contain `sketch005` i.e. started a new sketch
await page.waitForTimeout(300)
await expect(page.locator('.cm-content')).toHaveText(
/sketch001 = startSketchOn\('XZ'\)/
)
@ -2846,7 +3185,7 @@ async function doEditSegmentsByDraggingHandle(page: Page, openPanes: string[]) {
}
test.describe('Can edit segments by dragging their handles', () => {
test('code pane open at start', async ({ page }) => {
test('code pane open at start-handles', async ({ page }) => {
// Load the app with the code panes
await page.addInitScript(async () => {
localStorage.setItem(
@ -2862,7 +3201,7 @@ test.describe('Can edit segments by dragging their handles', () => {
await doEditSegmentsByDraggingHandle(page, ['code'])
})
test('code pane closed at start', async ({ page }) => {
test('code pane closed at start-handles', async ({ page }) => {
// Load the app with the code panes
await page.addInitScript(async () => {
localStorage.setItem(
@ -3180,6 +3519,7 @@ test.describe('Snap to close works (at any scale)', () => {
})
test('Sketch on face', async ({ page }) => {
test.setTimeout(90_000)
const u = await getUtils(page)
await page.addInitScript(async () => {
localStorage.setItem(
@ -5373,6 +5713,7 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
)
await page.getByTestId('overlay-menu').click()
await page.waitForTimeout(100)
await page.getByText('Delete Segment').click()
await page.getByText('Cancel').click()
@ -5385,6 +5726,7 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
)
await page.getByTestId('overlay-menu').click()
await page.waitForTimeout(100)
await page.getByText('Delete Segment').click()
await page.getByText('Continue and unconstrain').last().click()
@ -5533,6 +5875,7 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
await expect(page.locator('.cm-content')).toContainText(before)
await page.getByTestId('overlay-menu').click()
await page.waitForTimeout(100)
await page.getByText('Remove constraints').click()
await expect(page.locator('.cm-content')).toContainText(after)
@ -5706,8 +6049,8 @@ test('Basic default modeling and sketch hotkeys work', async ({ page }) => {
await expect(extrudeButton).not.toBeDisabled()
await page.keyboard.press('e')
await page.waitForTimeout(100)
await page.mouse.move(900, 200, { steps: 5 })
await page.mouse.click(900, 200)
await page.mouse.move(800, 200, { steps: 5 })
await page.mouse.click(800, 200)
await page.waitForTimeout(100)
await page.getByRole('button', { name: 'Continue' }).click()
await page.getByRole('button', { name: 'Submit command' }).click()

View File

@ -45,8 +45,8 @@ async function clearCommandLogs(page: Page) {
await page.getByTestId('clear-commands').click()
}
async function expectCmdLog(page: Page, locatorStr: string) {
await expect(page.locator(locatorStr).last()).toBeVisible()
async function expectCmdLog(page: Page, locatorStr: string, timeout = 5000) {
await expect(page.locator(locatorStr).last()).toBeVisible({ timeout })
}
async function waitForDefaultPlanesToBeVisible(page: Page) {
@ -228,7 +228,8 @@ export async function getUtils(page: Page) {
await fillInput('z', xyz[2])
},
clearCommandLogs: () => clearCommandLogs(page),
expectCmdLog: (locatorStr: string) => expectCmdLog(page, locatorStr),
expectCmdLog: (locatorStr: string, timeout = 5000) =>
expectCmdLog(page, locatorStr, timeout),
openKclCodePanel: () => openKclCodePanel(page),
closeKclCodePanel: () => closeKclCodePanel(page),
openDebugPanel: () => openDebugPanel(page),
@ -300,11 +301,19 @@ export async function getUtils(page: Page) {
(screenshot.width * coords.y * pixMultiplier +
coords.x * pixMultiplier) *
4 // rgba is 4 channels
return Math.max(
const maxDiff = Math.max(
Math.abs(screenshot.data[index] - expected[0]),
Math.abs(screenshot.data[index + 1] - expected[1]),
Math.abs(screenshot.data[index + 2] - expected[2])
)
if (maxDiff > 4) {
console.log(
`Expected: ${expected} Actual: [${screenshot.data[index]}, ${
screenshot.data[index + 1]
}, ${screenshot.data[index + 2]}]`
)
}
return maxDiff
},
doAndWaitForImageDiff: (fn: () => Promise<any>, diffCount = 200) =>
new Promise(async (resolve) => {

View File

@ -89,7 +89,7 @@
"fmt-check": "prettier --check ./src *.ts *.json *.js ./e2e",
"fetch:wasm": "./get-latest-wasm-bundle.sh",
"build:wasm-dev": "(cd src/wasm-lib && wasm-pack build --dev --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm": "(cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm": "(cd src/wasm-lib && wasm-pack build --release --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
"build:wasm-clean": "yarn wasm-prep && yarn build:wasm",
"remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"",
"wasm-prep": "rm -rf src/wasm-lib/pkg && mkdir src/wasm-lib/pkg && rm -rf src/wasm-lib/kcl/bindings",

src-tauri/Cargo.lock (generated), 4 changed lines
View File

@ -4546,9 +4546,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.116"
version = "1.0.118"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813"
checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4"
dependencies = [
"indexmap 2.2.6",
"itoa 1.0.11",

View File

@ -25,6 +25,7 @@ import { LowerRightControls } from 'components/LowerRightControls'
import ModalContainer from 'react-modal-promise'
import useHotkeyWrapper from 'lib/hotkeyWrapper'
import Gizmo from 'components/Gizmo'
import { CoreDumpManager } from 'lib/coredump'
export function App() {
useRefreshSettings(paths.FILE + 'SETTINGS')
@ -55,7 +56,11 @@ export function App() {
setHtmlRef(ref)
}, [ref])
const { settings } = useSettingsAuthContext()
const { auth, settings } = useSettingsAuthContext()
const token = auth?.context?.token
const coreDumpManager = new CoreDumpManager(engineCommandManager, ref, token)
const {
app: { onboardingStatus },
} = settings.context
@ -129,7 +134,7 @@ export function App() {
<ModelingSidebar paneOpacity={paneOpacity} />
<Stream />
{/* <CamToggle /> */}
<LowerRightControls>
<LowerRightControls coreDumpManager={coreDumpManager}>
<Gizmo />
</LowerRightControls>
</div>

View File

@ -534,7 +534,7 @@ export class SceneEntities {
segmentName: 'line' | 'tangentialArcTo' = 'line',
shouldTearDown = true
) => {
const _ast = JSON.parse(JSON.stringify(kclManager.ast))
const _ast = kclManager.ast
const _node1 = getNodeFromPath<VariableDeclaration>(
_ast,
@ -692,7 +692,7 @@ export class SceneEntities {
sketchOrigin: [number, number, number],
rectangleOrigin: [x: number, y: number]
) => {
let _ast = JSON.parse(JSON.stringify(kclManager.ast))
let _ast = kclManager.ast
const _node1 = getNodeFromPath<VariableDeclaration>(
_ast,
@ -723,7 +723,9 @@ export class SceneEntities {
...getRectangleCallExpressions(rectangleOrigin, tags),
])
_ast = parse(recast(_ast))
let result = parse(recast(_ast))
if (trap(result)) return Promise.reject(result)
_ast = result
const { programMemoryOverride, truncatedAst } = await this.setupSketch({
sketchPathToNode,
@ -737,7 +739,7 @@ export class SceneEntities {
sceneInfra.setCallbacks({
onMove: async (args) => {
// Update the width and height of the draft rectangle
const pathToNodeTwo = JSON.parse(JSON.stringify(sketchPathToNode))
const pathToNodeTwo = sketchPathToNode
pathToNodeTwo[1][0] = 0
const _node = getNodeFromPath<VariableDeclaration>(
@ -799,7 +801,9 @@ export class SceneEntities {
if (sketchInit.type === 'PipeExpression') {
updateRectangleSketch(sketchInit, x, y, tags[0])
_ast = parse(recast(_ast))
let result = parse(recast(_ast))
if (trap(result)) return Promise.reject(result)
_ast = result
// Update the primary AST and unequip the rectangle tool
await kclManager.executeAstMock(_ast)
@ -1003,10 +1007,8 @@ export class SceneEntities {
PROFILE_START,
])
if (!group) return
const pathToNode: PathToNode = JSON.parse(
JSON.stringify(group.userData.pathToNode)
)
const varDecIndex = JSON.parse(JSON.stringify(pathToNode[1][0]))
const pathToNode: PathToNode = group.userData.pathToNode
const varDecIndex: number = pathToNode[1][0] as number
if (draftInfo) {
pathToNode[1][0] = 0
}
@ -1719,7 +1721,7 @@ function prepareTruncatedMemoryAndAst(
}
| Error {
const bodyIndex = Number(sketchPathToNode?.[1]?.[0]) || 0
const _ast = JSON.parse(JSON.stringify(ast))
const _ast = ast
const _node = getNodeFromPath<VariableDeclaration>(
_ast,
@ -1778,7 +1780,7 @@ function prepareTruncatedMemoryAndAst(
}
const truncatedAst: Program = {
..._ast,
body: [JSON.parse(JSON.stringify(_ast.body[bodyIndex]))],
body: [_ast.body[bodyIndex]],
}
const programMemoryOverride = programMemoryInit()
if (err(programMemoryOverride)) return programMemoryOverride
@ -1804,7 +1806,7 @@ function prepareTruncatedMemoryAndAst(
}
if (value.type === 'TagIdentifier') {
programMemoryOverride.root[key] = JSON.parse(JSON.stringify(value))
programMemoryOverride.root[key] = value
}
}
@ -1819,7 +1821,7 @@ function prepareTruncatedMemoryAndAst(
if (!memoryItem) {
continue
}
programMemoryOverride.root[name] = JSON.parse(JSON.stringify(memoryItem))
programMemoryOverride.root[name] = memoryItem
}
return {
truncatedAst,
@ -1967,9 +1969,9 @@ export async function getSketchOrientationDetails(
* @param entityId - The ID of the entity for which orientation details are being fetched.
* @returns A promise that resolves with the orientation details of the face.
*/
async function getFaceDetails(
export async function getFaceDetails(
entityId: string
): Promise<Models['FaceIsPlanar_type']> {
): Promise<Models['GetSketchModePlane_type']> {
// TODO mode engine connection to allow batching returns and batch the following
await engineCommandManager.sendSceneCommand({
type: 'modeling_cmd_req',
@ -1982,8 +1984,7 @@ async function getFaceDetails(
entity_id: entityId,
},
})
// TODO change typing to get_sketch_mode_plane once lib is updated
const faceInfo: Models['FaceIsPlanar_type'] = (
const faceInfo: Models['GetSketchModePlane_type'] = (
await engineCommandManager.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),

View File

@ -151,9 +151,7 @@ export function useCalc({
ast,
engineCommandManager,
useFakeExecutor: true,
programMemoryOverride: JSON.parse(
JSON.stringify(kclManager.programMemory)
),
programMemoryOverride: kclManager.programMemory,
}).then(({ programMemory }) => {
const resultDeclaration = ast.body.find(
(a) =>

View File

@ -6,8 +6,18 @@ import { NetworkHealthIndicator } from 'components/NetworkHealthIndicator'
import { HelpMenu } from './HelpMenu'
import { Link, useLocation } from 'react-router-dom'
import { useAbsoluteFilePath } from 'hooks/useAbsoluteFilePath'
import { coreDump } from 'lang/wasm'
import toast from 'react-hot-toast'
import { CoreDumpManager } from 'lib/coredump'
import openWindow from 'lib/openWindow'
export function LowerRightControls(props: React.PropsWithChildren) {
export function LowerRightControls({
children,
coreDumpManager,
}: {
children?: React.ReactNode
coreDumpManager?: CoreDumpManager
}) {
const location = useLocation()
const filePath = useAbsoluteFilePath()
const linkOverrideClassName =
@ -15,9 +25,42 @@ export function LowerRightControls(props: React.PropsWithChildren) {
const isPlayWright = window?.localStorage.getItem('playwright') === 'true'
async function reportbug(event: { preventDefault: () => void }) {
event?.preventDefault()
if (!coreDumpManager) {
// open default reporting option
openWindow('https://github.com/KittyCAD/modeling-app/issues/new/choose')
} else {
toast
.promise(
coreDump(coreDumpManager, true),
{
loading: 'Preparing bug report...',
success: 'Bug report opened in new window',
error: 'Unable to export a core dump. Using default reporting.',
},
{
success: {
// Note: this extended duration is especially important for Playwright e2e testing
// default duration is 2000 - https://react-hot-toast.com/docs/toast#default-durations
duration: 6000,
},
}
)
.catch((err: Error) => {
if (err) {
openWindow(
'https://github.com/KittyCAD/modeling-app/issues/new/choose'
)
}
})
}
}
return (
<section className="fixed bottom-2 right-2 flex flex-col items-end gap-3 pointer-events-none">
{props.children}
{children}
<menu className="flex items-center justify-end gap-3 pointer-events-auto">
<a
href={`https://github.com/KittyCAD/modeling-app/releases/tag/v${APP_VERSION}`}
@ -28,6 +71,7 @@ export function LowerRightControls(props: React.PropsWithChildren) {
v{isPlayWright ? '11.22.33' : APP_VERSION}
</a>
<a
onClick={reportbug}
href="https://github.com/KittyCAD/modeling-app/issues/new/choose"
target="_blank"
rel="noopener noreferrer"

View File

@ -30,7 +30,7 @@ import { wasmUrl } from 'lang/wasm'
import { PROJECT_ENTRYPOINT } from 'lib/constants'
import { useNetworkContext } from 'hooks/useNetworkContext'
import { NetworkHealthState } from 'hooks/useNetworkStatus'
import { err, trap } from 'lib/trap'
import { err } from 'lib/trap'
function getWorkspaceFolders(): LSP.WorkspaceFolder[] {
return []

View File

@ -23,6 +23,7 @@ import {
editorManager,
sceneEntitiesManager,
} from 'lib/singletons'
import { useHotkeys } from 'react-hotkeys-hook'
import { applyConstraintHorzVertDistance } from './Toolbar/SetHorzVertDistance'
import {
angleBetweenInfo,
@ -78,6 +79,7 @@ import { getVarNameModal } from 'hooks/useToolbarGuards'
import useHotkeyWrapper from 'lib/hotkeyWrapper'
import { uuidv4 } from 'lib/utils'
import { err, trap } from 'lib/trap'
import { useCommandsContext } from 'hooks/useCommandsContext'
type MachineContext<T extends AnyStateMachine> = {
state: StateFrom<T>
@ -124,7 +126,6 @@ export const ModelingMachineProvider = ({
token
)
useHotkeyWrapper(['meta + shift + .'], () => {
console.warn('CoreDump: Initializing core dump')
toast.promise(
coreDump(coreDumpManager, true),
{
@ -141,6 +142,7 @@ export const ModelingMachineProvider = ({
}
)
})
const { commandBarState } = useCommandsContext()
// Settings machine setup
// const retrievedSettings = useRef(
@ -326,6 +328,11 @@ export const ModelingMachineProvider = ({
)
updateSceneObjectColors()
// side effect to stop code mirror from updating the same selections again
editorManager.lastSelection = selections.codeBasedSelections
.map(({ range }) => `${range[1]}->${range[1]}`)
.join('&')
return {
selectionRanges: selections,
}
@ -460,6 +467,11 @@ export const ModelingMachineProvider = ({
return canExtrudeSelection(selectionRanges)
},
'has valid selection for deletion': ({ selectionRanges }) => {
if (!commandBarState.matches('Closed')) return false
if (selectionRanges.codeBasedSelections.length <= 0) return false
return true
},
'Sketch is empty': ({ sketchDetails }) => {
const node = getNodeFromPath<VariableDeclaration>(
kclManager.ast,
@ -501,7 +513,7 @@ export const ModelingMachineProvider = ({
services: {
'AST-undo-startSketchOn': async ({ sketchDetails }) => {
if (!sketchDetails) return
const newAst: Program = JSON.parse(JSON.stringify(kclManager.ast))
const newAst: Program = kclManager.ast
const varDecIndex = sketchDetails.sketchPathToNode[1][0]
// remove body item at varDecIndex
newAst.body = newAst.body.filter((_, i) => i !== varDecIndex)
@ -923,6 +935,11 @@ export const ModelingMachineProvider = ({
}
}, [modelingSend])
// Allow using the delete key to delete solids
useHotkeys(['backspace', 'delete', 'del'], () => {
modelingSend({ type: 'Delete selection' })
})
useStateMachineCommands({
machineId: 'modeling',
state: modelingState,

View File

@ -1,7 +1,25 @@
import { coreDump } from 'lang/wasm'
import { CoreDumpManager } from 'lib/coredump'
import { CustomIcon } from './CustomIcon'
import { engineCommandManager } from 'lib/singletons'
import React from 'react'
import toast from 'react-hot-toast'
import Tooltip from './Tooltip'
import { useStore } from 'useStore'
import { useSettingsAuthContext } from 'hooks/useSettingsAuthContext'
export const RefreshButton = ({ children }: React.PropsWithChildren) => {
const { auth } = useSettingsAuthContext()
const token = auth?.context?.token
const { htmlRef } = useStore((s) => ({
htmlRef: s.htmlRef,
}))
const coreDumpManager = new CoreDumpManager(
engineCommandManager,
htmlRef,
token
)
export function RefreshButton() {
async function refresh() {
if (window && 'plausible' in window) {
const p = window.plausible as (
@ -17,8 +35,26 @@ export function RefreshButton() {
})
}
// Window may not be available in some environments
window?.location.reload()
toast
.promise(
coreDump(coreDumpManager, true),
{
loading: 'Starting core dump...',
success: 'Core dump completed successfully',
error: 'Error while exporting core dump',
},
{
success: {
// Note: this extended duration is especially important for Playwright e2e testing
// default duration is 2000 - https://react-hot-toast.com/docs/toast#default-durations
duration: 6000,
},
}
)
.then(() => {
// Window may not be available in some environments
window?.location.reload()
})
}
return (

View File

@ -83,6 +83,7 @@ export const Stream = ({ className = '' }: { className?: string }) => {
if (!videoRef.current) return
if (state.matches('Sketch')) return
if (state.matches('Sketch no face')) return
const { x, y } = getNormalisedCoordinates({
clientX: e.clientX,
clientY: e.clientY,

View File

@ -145,7 +145,7 @@ export async function applyConstraintIntersect({
const { transforms, forcedSelectionRanges } = info
const transform1 = transformSecondarySketchLinesTagFirst({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges: forcedSelectionRanges,
transformInfos: transforms,
programMemory: kclManager.programMemory,

View File

@ -106,7 +106,7 @@ export async function applyConstraintAbsDistance({
const transformInfos = info.transforms
const transform1 = transformAstSketchLines({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges: selectionRanges,
transformInfos,
programMemory: kclManager.programMemory,
@ -128,7 +128,7 @@ export async function applyConstraintAbsDistance({
)
const transform2 = transformAstSketchLines({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges: selectionRanges,
transformInfos,
programMemory: kclManager.programMemory,
@ -176,7 +176,7 @@ export function applyConstraintAxisAlign({
let finalValue = createIdentifier('ZERO')
return transformAstSketchLines({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges: selectionRanges,
transformInfos,
programMemory: kclManager.programMemory,

View File

@ -100,7 +100,7 @@ export async function applyConstraintAngleBetween({
const transformInfos = info.transforms
const transformed1 = transformSecondarySketchLinesTagFirst({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges,
transformInfos,
programMemory: kclManager.programMemory,

View File

@ -108,7 +108,7 @@ export async function applyConstraintHorzVertDistance({
if (err(info)) return Promise.reject(info)
const transformInfos = info.transforms
const transformed = transformSecondarySketchLinesTagFirst({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges,
transformInfos,
programMemory: kclManager.programMemory,

View File

@ -84,7 +84,7 @@ export async function applyConstraintAngleLength({
const { transforms } = angleLength
const sketched = transformAstSketchLines({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges,
transformInfos: transforms,
programMemory: kclManager.programMemory,
@ -139,7 +139,7 @@ export async function applyConstraintAngleLength({
}
const retval = transformAstSketchLines({
ast: JSON.parse(JSON.stringify(kclManager.ast)),
ast: kclManager.ast,
selectionRanges,
transformInfos: transforms,
programMemory: kclManager.programMemory,

View File

@ -23,7 +23,7 @@ export default class EditorManager {
}
private _lastSelectionEvent: number | null = null
private _lastSelection: string = ''
lastSelection: string = ''
private _lastEvent: { event: string; time: number } | null = null
private _modelingSend: (eventInfo: ModelingMachineEvent) => void = () => {}
@ -199,12 +199,14 @@ export default class EditorManager {
viewUpdate?.state?.selection?.ranges || []
)
if (selString === this._lastSelection) {
if (selString === this.lastSelection) {
// onUpdate is noisy and is fired a lot by extensions
// since we're only interested in selections changes we can ignore most of these.
return
}
this._lastSelection = selString
// note this is also set from the "Set selection" action to stop code mirror from updating selections right after
// selections are made from the scene
this.lastSelection = selString
if (
this._lastSelectionEvent &&

View File

@ -42,9 +42,8 @@ function registerServerCapability(
serverCapabilities: ServerCapabilities,
registration: Registration
): ServerCapabilities | Error {
const serverCapabilitiesCopy = JSON.parse(
JSON.stringify(serverCapabilities)
) as IFlexibleServerCapabilities
const serverCapabilitiesCopy =
serverCapabilities as IFlexibleServerCapabilities
const { method, registerOptions } = registration
const providerName = ServerCapabilitiesProviders[method]
@ -52,10 +51,7 @@ function registerServerCapability(
if (!registerOptions) {
serverCapabilitiesCopy[providerName] = true
} else {
serverCapabilitiesCopy[providerName] = Object.assign(
{},
JSON.parse(JSON.stringify(registerOptions))
)
serverCapabilitiesCopy[providerName] = Object.assign({}, registerOptions)
}
} else {
return new Error('Could not register server capability.')
@ -68,9 +64,8 @@ function unregisterServerCapability(
serverCapabilities: ServerCapabilities,
unregistration: Unregistration
): ServerCapabilities {
const serverCapabilitiesCopy = JSON.parse(
JSON.stringify(serverCapabilities)
) as IFlexibleServerCapabilities
const serverCapabilitiesCopy =
serverCapabilities as IFlexibleServerCapabilities
const { method } = unregistration
const providerName = ServerCapabilitiesProviders[method]

View File

@ -15,6 +15,7 @@ import {
sketchOnExtrudedFace,
deleteSegmentFromPipeExpression,
removeSingleConstraintInfo,
deleteFromSelection,
} from './modifyAst'
import { enginelessExecutor } from '../lib/testHelpers'
import { findUsesOfTagInPipe, getNodePathFromSourceRange } from './queryAst'
@ -696,3 +697,196 @@ describe('Testing removeSingleConstraintInfo', () => {
})
})
})
describe('Testing deleteFromSelection', () => {
const cases = [
[
'basicCase',
{
codeBefore: `const myVar = 5
const sketch003 = startSketchOn('XZ')
|> startProfileAt([3.82, 13.6], %)
|> line([-2.94, 2.7], %)
|> line([7.7, 0.16], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)`,
codeAfter: `const myVar = 5\n`,
lineOfInterest: 'line([-2.94, 2.7], %)',
type: 'default',
},
],
[
'delete extrude',
{
codeBefore: `const sketch001 = startSketchOn('XZ')
|> startProfileAt([3.29, 7.86], %)
|> line([2.48, 2.44], %)
|> line([2.66, 1.17], %)
|> line([3.75, 0.46], %)
|> line([4.99, -0.46], %, $seg01)
|> line([-3.86, -2.73], %)
|> line([-17.67, 0.85], %)
|> close(%)
const extrude001 = extrude(10, sketch001)`,
codeAfter: `const sketch001 = startSketchOn('XZ')
|> startProfileAt([3.29, 7.86], %)
|> line([2.48, 2.44], %)
|> line([2.66, 1.17], %)
|> line([3.75, 0.46], %)
|> line([4.99, -0.46], %, $seg01)
|> line([-3.86, -2.73], %)
|> line([-17.67, 0.85], %)
|> close(%)\n`,
lineOfInterest: 'line([2.66, 1.17], %)',
type: 'extrude-wall',
},
],
[
'delete extrude with sketch on it',
{
codeBefore: `const myVar = 5
const sketch001 = startSketchOn('XZ')
|> startProfileAt([4.46, 5.12], %, $tag)
|> line([0.08, myVar], %)
|> line([13.03, 2.02], %, $seg01)
|> line([3.9, -7.6], %)
|> line([-11.18, -2.15], %)
|> line([5.41, -9.61], %)
|> line([-8.54, -2.51], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude001 = extrude(5, sketch001)
const sketch002 = startSketchOn(extrude001, seg01)
|> startProfileAt([-12.55, 2.89], %)
|> line([3.02, 1.9], %)
|> line([1.82, -1.49], %, $seg02)
|> angledLine([-86, segLen(seg02, %)], %)
|> line([-3.97, -0.53], %)
|> line([0.3, 0.84], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)`,
codeAfter: `const myVar = 5
const sketch001 = startSketchOn('XZ')
|> startProfileAt([4.46, 5.12], %, $tag)
|> line([0.08, myVar], %)
|> line([13.03, 2.02], %, $seg01)
|> line([3.9, -7.6], %)
|> line([-11.18, -2.15], %)
|> line([5.41, -9.61], %)
|> line([-8.54, -2.51], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const sketch002 = startSketchOn({
plane: {
origin: { x: 1, y: 2, z: 3 },
x_axis: { x: 4, y: 5, z: 6 },
y_axis: { x: 7, y: 8, z: 9 },
z_axis: { x: 10, y: 11, z: 12 }
}
})
|> startProfileAt([-12.55, 2.89], %)
|> line([3.02, 1.9], %)
|> line([1.82, -1.49], %, $seg02)
|> angledLine([-86, segLen(seg02, %)], %)
|> line([-3.97, -0.53], %)
|> line([0.3, 0.84], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
`,
lineOfInterest: 'line([-11.18, -2.15], %)',
type: 'extrude-wall',
},
],
[
'delete extrude with sketch on it',
{
codeBefore: `const myVar = 5
const sketch001 = startSketchOn('XZ')
|> startProfileAt([4.46, 5.12], %, $tag)
|> line([0.08, myVar], %)
|> line([13.03, 2.02], %, $seg01)
|> line([3.9, -7.6], %)
|> line([-11.18, -2.15], %)
|> line([5.41, -9.61], %)
|> line([-8.54, -2.51], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const extrude001 = extrude(5, sketch001)
const sketch002 = startSketchOn(extrude001, seg01)
|> startProfileAt([-12.55, 2.89], %)
|> line([3.02, 1.9], %)
|> line([1.82, -1.49], %, $seg02)
|> angledLine([-86, segLen(seg02, %)], %)
|> line([-3.97, -0.53], %)
|> line([0.3, 0.84], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)`,
codeAfter: `const myVar = 5
const sketch001 = startSketchOn('XZ')
|> startProfileAt([4.46, 5.12], %, $tag)
|> line([0.08, myVar], %)
|> line([13.03, 2.02], %, $seg01)
|> line([3.9, -7.6], %)
|> line([-11.18, -2.15], %)
|> line([5.41, -9.61], %)
|> line([-8.54, -2.51], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
const sketch002 = startSketchOn({
plane: {
origin: { x: 1, y: 2, z: 3 },
x_axis: { x: 4, y: 5, z: 6 },
y_axis: { x: 7, y: 8, z: 9 },
z_axis: { x: 10, y: 11, z: 12 }
}
})
|> startProfileAt([-12.55, 2.89], %)
|> line([3.02, 1.9], %)
|> line([1.82, -1.49], %, $seg02)
|> angledLine([-86, segLen(seg02, %)], %)
|> line([-3.97, -0.53], %)
|> line([0.3, 0.84], %)
|> lineTo([profileStartX(%), profileStartY(%)], %)
|> close(%)
`,
lineOfInterest: 'startProfileAt([4.46, 5.12], %, $tag)',
type: 'end-cap',
},
],
] as const
test.each(cases)(
'%s',
async (name, { codeBefore, codeAfter, lineOfInterest, type }) => {
// const lineOfInterest = 'line([-2.94, 2.7], %)'
const ast = parse(codeBefore)
if (err(ast)) throw ast
const programMemory = await enginelessExecutor(ast)
// deleteFromSelection
const range: [number, number] = [
codeBefore.indexOf(lineOfInterest),
codeBefore.indexOf(lineOfInterest) + lineOfInterest.length,
]
const newAst = await deleteFromSelection(
ast,
{
range,
type,
},
programMemory,
async () => {
await new Promise((resolve) => setTimeout(resolve, 100))
return {
origin: { x: 1, y: 2, z: 3 },
x_axis: { x: 4, y: 5, z: 6 },
y_axis: { x: 7, y: 8, z: 9 },
z_axis: { x: 10, y: 11, z: 12 },
}
}
)
if (err(newAst)) throw newAst
const newCode = recast(newAst)
expect(newCode).toBe(codeAfter)
}
)
})

View File

@ -17,6 +17,7 @@ import {
PathToNode,
ProgramMemory,
SourceRange,
SketchGroup,
} from './wasm'
import {
isNodeSafeToReplacePath,
@ -25,6 +26,7 @@ import {
getNodeFromPath,
getNodePathFromSourceRange,
isNodeSafeToReplace,
traverse,
} from './queryAst'
import { addTagForSketchOnFace, getConstraintInfo } from './std/sketch'
import {
@ -38,6 +40,7 @@ import { isOverlap, roundOff } from 'lib/utils'
import { KCL_DEFAULT_CONSTANT_PREFIXES } from 'lib/constants'
import { ConstrainInfo } from './std/stdTypes'
import { TagDeclarator } from 'wasm-lib/kcl/bindings/TagDeclarator'
import { Models } from '@kittycad/lib'
export function startSketchOnDefault(
node: Program,
@ -707,7 +710,7 @@ export function moveValueIntoNewVariablePath(
programMemory,
pathToNode
)
let _node = JSON.parse(JSON.stringify(ast))
let _node = ast
const boop = replacer(_node, variableName)
if (trap(boop)) return { modifiedAst: ast }
@ -739,7 +742,7 @@ export function moveValueIntoNewVariable(
programMemory,
sourceRange
)
let _node = JSON.parse(JSON.stringify(ast))
let _node = ast
const replaced = replacer(_node, variableName)
if (trap(replaced)) return { modifiedAst: ast }
@ -764,7 +767,7 @@ export function deleteSegmentFromPipeExpression(
code: string,
pathToNode: PathToNode
): Program | Error {
let _modifiedAst: Program = JSON.parse(JSON.stringify(modifiedAst))
let _modifiedAst: Program = modifiedAst
dependentRanges.forEach((range) => {
const path = getNodePathFromSourceRange(_modifiedAst, range)
@ -873,3 +876,175 @@ export function removeSingleConstraintInfo(
if (err(retval)) return false
return retval
}
export async function deleteFromSelection(
ast: Program,
selection: Selection,
programMemory: ProgramMemory,
getFaceDetails: (id: string) => Promise<Models['FaceIsPlanar_type']> = () =>
({} as any)
): Promise<Program | Error> {
const astClone = ast
const range = selection.range
const path = getNodePathFromSourceRange(ast, range)
const varDec = getNodeFromPath<VariableDeclarator>(
ast,
path,
'VariableDeclarator'
)
if (err(varDec)) return varDec
if (
(selection.type === 'extrude-wall' ||
selection.type === 'end-cap' ||
selection.type === 'start-cap') &&
varDec.node.init.type === 'PipeExpression'
) {
const varDecName = varDec.node.id.name
let pathToNode: PathToNode | null = null
let extrudeNameToDelete = ''
traverse(astClone, {
enter: (node, path) => {
if (node.type === 'VariableDeclaration') {
const dec = node.declarations[0]
if (
dec.init.type === 'CallExpression' &&
(dec.init.callee.name === 'extrude' ||
dec.init.callee.name === 'revolve') &&
dec.init.arguments?.[1].type === 'Identifier' &&
dec.init.arguments?.[1].name === varDecName
) {
pathToNode = path
extrudeNameToDelete = dec.id.name
}
}
},
})
if (!pathToNode) return new Error('Could not find extrude variable')
const expressionIndex = pathToNode[1][0] as number
astClone.body.splice(expressionIndex, 1)
if (extrudeNameToDelete) {
await new Promise(async (resolve) => {
let currentVariableName = ''
const pathsDependingOnExtrude: Array<{
path: PathToNode
sketchName: string
}> = []
traverse(astClone, {
leave: (node) => {
if (node.type === 'VariableDeclaration') {
currentVariableName = ''
}
},
enter: async (node, path) => {
if (node.type === 'VariableDeclaration') {
currentVariableName = node.declarations[0].id.name
}
if (
// match startSketchOn(${extrudeNameToDelete})
node.type === 'CallExpression' &&
node.callee.name === 'startSketchOn' &&
node.arguments[0].type === 'Identifier' &&
node.arguments[0].name === extrudeNameToDelete
) {
pathsDependingOnExtrude.push({
path,
sketchName: currentVariableName,
})
}
},
})
const roundLiteral = (x: number) => createLiteral(roundOff(x))
const modificationDetails: {
parent: PipeExpression['body']
faceDetails: Models['FaceIsPlanar_type']
lastKey: number
}[] = []
for (const { path, sketchName } of pathsDependingOnExtrude) {
const parent = getNodeFromPath<PipeExpression['body']>(
astClone,
path.slice(0, -1)
)
if (err(parent)) {
return
}
const sketchToPreserve = programMemory.root[sketchName] as SketchGroup
console.log('sketchName', sketchName)
// Can't kick off multiple requests at once as getFaceDetails
// is three engine calls in one and they conflict
const faceDetails = await getFaceDetails(sketchToPreserve.on.id)
if (
!(
faceDetails.origin &&
faceDetails.x_axis &&
faceDetails.y_axis &&
faceDetails.z_axis
)
) {
return
}
const lastKey = Number(path.slice(-1)[0][0])
modificationDetails.push({
parent: parent.node,
faceDetails,
lastKey,
})
}
for (const { parent, faceDetails, lastKey } of modificationDetails) {
if (
!(
faceDetails.origin &&
faceDetails.x_axis &&
faceDetails.y_axis &&
faceDetails.z_axis
)
) {
continue
}
parent[lastKey] = createCallExpressionStdLib('startSketchOn', [
createObjectExpression({
plane: createObjectExpression({
origin: createObjectExpression({
x: roundLiteral(faceDetails.origin.x),
y: roundLiteral(faceDetails.origin.y),
z: roundLiteral(faceDetails.origin.z),
}),
x_axis: createObjectExpression({
x: roundLiteral(faceDetails.x_axis.x),
y: roundLiteral(faceDetails.x_axis.y),
z: roundLiteral(faceDetails.x_axis.z),
}),
y_axis: createObjectExpression({
x: roundLiteral(faceDetails.y_axis.x),
y: roundLiteral(faceDetails.y_axis.y),
z: roundLiteral(faceDetails.y_axis.z),
}),
z_axis: createObjectExpression({
x: roundLiteral(faceDetails.z_axis.x),
y: roundLiteral(faceDetails.z_axis.y),
z: roundLiteral(faceDetails.z_axis.z),
}),
}),
}),
])
}
resolve(true)
})
}
// await prom
return astClone
} else if (varDec.node.init.type === 'PipeExpression') {
const pipeBody = varDec.node.init.body
if (
pipeBody[0].type === 'CallExpression' &&
pipeBody[0].callee.name === 'startSketchOn'
) {
// remove varDec
const varDecIndex = varDec.shallowPath[1][0] as number
astClone.body.splice(varDecIndex, 1)
return astClone
}
}
return new Error('Selection not recognised, could not delete')
}

View File

@ -87,10 +87,7 @@ const yo2 = hmm([identifierGuy + 5])`
expect(result.isSafe).toBe(true)
expect(result.value?.type).toBe('BinaryExpression')
expect(code.slice(result.value.start, result.value.end)).toBe('100 + 100')
const replaced = result.replacer(
JSON.parse(JSON.stringify(ast)),
'replaceName'
)
const replaced = result.replacer(ast, 'replaceName')
if (err(replaced)) throw replaced
const outCode = recast(replaced.modifiedAst)
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
@ -114,10 +111,7 @@ const yo2 = hmm([identifierGuy + 5])`
expect(result.isSafe).toBe(true)
expect(result.value?.type).toBe('CallExpression')
expect(code.slice(result.value.start, result.value.end)).toBe("def('yo')")
const replaced = result.replacer(
JSON.parse(JSON.stringify(ast)),
'replaceName'
)
const replaced = result.replacer(ast, 'replaceName')
if (err(replaced)) throw replaced
const outCode = recast(replaced.modifiedAst)
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
@ -154,10 +148,7 @@ const yo2 = hmm([identifierGuy + 5])`
expect(result.isSafe).toBe(true)
expect(result.value?.type).toBe('BinaryExpression')
expect(code.slice(result.value.start, result.value.end)).toBe('5 + 6')
const replaced = result.replacer(
JSON.parse(JSON.stringify(ast)),
'replaceName'
)
const replaced = result.replacer(ast, 'replaceName')
if (err(replaced)) throw replaced
const outCode = recast(replaced.modifiedAst)
expect(outCode).toContain(`const yo = replaceName`)
@ -173,10 +164,7 @@ const yo2 = hmm([identifierGuy + 5])`
expect(code.slice(result.value.start, result.value.end)).toBe(
"jkl('yo') + 2"
)
const replaced = result.replacer(
JSON.parse(JSON.stringify(ast)),
'replaceName'
)
const replaced = result.replacer(ast, 'replaceName')
if (err(replaced)) throw replaced
const { modifiedAst } = replaced
const outCode = recast(modifiedAst)
@ -195,10 +183,7 @@ const yo2 = hmm([identifierGuy + 5])`
expect(code.slice(result.value.start, result.value.end)).toBe(
'identifierGuy + 5'
)
const replaced = result.replacer(
JSON.parse(JSON.stringify(ast)),
'replaceName'
)
const replaced = result.replacer(ast, 'replaceName')
if (err(replaced)) throw replaced
const { modifiedAst } = replaced
const outCode = recast(modifiedAst)

View File

@ -520,8 +520,8 @@ export function isNodeSafeToReplacePath(
const replaceNodeWithIdentifier: ReplacerFn = (_ast, varName) => {
const identifier = createIdentifier(varName)
const last = finPath[finPath.length - 1]
const pathToReplaced = JSON.parse(JSON.stringify(finPath))
pathToReplaced[1][0] = pathToReplaced[1][0] + 1
const pathToReplaced = finPath
pathToReplaced[1][0] = (pathToReplaced[1][0] as number) + 1
const startPath = finPath.slice(0, -1)
const _nodeToReplace = getNodeFromPath(_ast, startPath)
if (err(_nodeToReplace)) return _nodeToReplace
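
As with most hunks in this change, the replacer above drops the JSON.parse(JSON.stringify(...)) defensive copy and mutates the path it was handed. A small sketch of what that means for callers, assuming a finPath: PathToNode value is in scope:

// Before: the round-trip copy left the caller's path untouched.
const copied: PathToNode = JSON.parse(JSON.stringify(finPath))
copied[1][0] = (copied[1][0] as number) + 1 // finPath is unchanged

// After: the binding aliases the same arrays, so the caller observes the bump.
const aliased: PathToNode = finPath
aliased[1][0] = (aliased[1][0] as number) + 1 // finPath[1][0] is incremented too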

View File

@ -24,11 +24,7 @@ import {
isNotLiteralArrayOrStatic,
} from 'lang/std/sketchcombos'
import { toolTips, ToolTip } from '../../useStore'
import {
createIdentifier,
createPipeExpression,
splitPathAtPipeExpression,
} from '../modifyAst'
import { createPipeExpression, splitPathAtPipeExpression } from '../modifyAst'
import {
SketchLineHelper,

View File

@ -1496,7 +1496,7 @@ export function transformSecondarySketchLinesTagFirst({
}
}
| Error {
// let node = JSON.parse(JSON.stringify(ast))
// let node = ast
const primarySelection = selectionRanges.codeBasedSelections[0].range
const _tag = giveSketchFnCallTag(ast, primarySelection, forceSegName)
@ -1565,7 +1565,7 @@ export function transformAstSketchLines({
}
| Error {
// deep clone since we are mutating in a loop, of which any could fail
let node = JSON.parse(JSON.stringify(ast))
let node = ast
let _valueUsedInTransform // TODO should this be an array?
const pathToNodeMap: PathToNodeMap = {}

View File

@ -33,7 +33,7 @@ export function updatePathToNodeFromMap(
oldPath: PathToNode,
pathToNodeMap: { [key: number]: PathToNode }
): PathToNode {
const updatedPathToNode = JSON.parse(JSON.stringify(oldPath))
const updatedPathToNode = oldPath
let max = 0
Object.values(pathToNodeMap).forEach((path) => {
const index = Number(path[1][0])

View File

@ -334,6 +334,7 @@ export async function coreDump(
openGithubIssue: boolean = false
): Promise<CoreDumpInfo> {
try {
console.warn('CoreDump: Initializing core dump')
const dump: CoreDumpInfo = await coredump(coreDumpManager)
/* NOTE: this console output of the coredump should include the field
`github_issue_url` which is not in the uploaded coredump file.

View File

@ -13,6 +13,14 @@ import screenshot from 'lib/screenshot'
import React from 'react'
import { VITE_KC_API_BASE_URL } from 'env'
/* eslint-disable suggest-no-throw/suggest-no-throw --
* All the throws in CoreDumpManager are intentional and should be caught and handled properly
* by the calling Promises with a catch block. The throws are essential to properly handling
* when the app isn't ready enough or otherwise unable to produce a core dump. By throwing
 * instead of simply erroring, the code halts execution at the first point at which it cannot
* complete the core dump request.
**/
/**
* CoreDumpManager module
* - for getting all the values from the JS world to pass to the Rust world for a core dump.
@ -22,6 +30,7 @@ import { VITE_KC_API_BASE_URL } from 'env'
// CoreDumpManager is instantiated in ModelingMachineProvider and passed to coreDump() in wasm.ts
// The async function coreDump() handles any errors thrown in its Promise catch method and rethrows
// them so that the toast handler in ModelingMachineProvider can show the user an error message toast
// TODO: Throw more
export class CoreDumpManager {
engineCommandManager: EngineCommandManager
htmlRef: React.RefObject<HTMLDivElement> | null
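
Per the comment block above, CoreDumpManager methods throw, coreDump() rethrows, and the caller surfaces the failure as a toast. A hedged sketch of that call-site pattern (the import path and the exact wiring in ModelingMachineProvider are assumptions here):

import toast from 'react-hot-toast'
import { coreDump } from 'lang/wasm' // assumed module path

// coreDumpManager is assumed to be the instance created in ModelingMachineProvider.
coreDump(coreDumpManager, /* openGithubIssue= */ true)
  .then((info) => console.log('CoreDump: complete', info))
  .catch((err) => toast.error(`Unable to generate a core dump: ${err}`))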

View File

@ -9,12 +9,12 @@ const wallMountL = 6 // the length of the bracket
const sigmaAllow = 35000 // psi
const width = 6 // inch
const p = 300 // Force on shelf - lbs
const L = 12 // inches
const M = L * p / 2 // Moment experienced at fixed end of bracket
const FOS = 2 // Factor of safety of 2 to be conservative
const shelfLength = 12 // inches
const moment = shelfLength * p / 2 // Moment experienced at fixed end of bracket
const factorOfSafety = 2 // Factor of safety of 2 to be conservative
// Calculate the thickness from the bending stress and factor of safety
const thickness = sqrt(6 * M * FOS / (width * sigmaAllow))
const thickness = sqrt(6 * moment * factorOfSafety / (width * sigmaAllow))
// 0.25 inch fillet radius
const filletR = 0.25
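
For reference, the renamed constants still encode the usual rectangular-section bending check, so nothing numeric changes here. In the sample's terms:

M = \frac{p \, L_{\text{shelf}}}{2}, \qquad \sigma = \frac{6M}{w t^{2}} \le \frac{\sigma_{\text{allow}}}{\text{FOS}} \;\Rightarrow\; t = \sqrt{\frac{6 \, M \, \text{FOS}}{w \, \sigma_{\text{allow}}}}

With the sample's values (shelfLength = 12 in, p = 300 lbs, width = 6 in, sigmaAllow = 35000 psi, factorOfSafety = 2), the moment is 1800 in·lb and thickness ≈ sqrt(21600 / 210000) ≈ 0.32 in.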

View File

@ -29,7 +29,10 @@ export function cleanErrs<T>(
return [argsWOutErr.length !== value.length, argsWOutErr, argsWErr]
}
// Used to report errors to user at a certain point in execution
/**
* Used to report errors to user at a certain point in execution
* @returns boolean
*/
export function trap<T>(
value: ExcludeErr<T> | Error,
opts?: {
@ -43,6 +46,8 @@ export function trap<T>(
console.error(value)
opts?.suppress ||
toast.error((opts?.altErr ?? value ?? new Error('Unknown')).toString())
toast.error((opts?.altErr ?? value ?? new Error('Unknown')).toString(), {
id: 'error',
})
return true
}

View File

@ -96,9 +96,7 @@ export function useCalculateKclExpression({
ast,
engineCommandManager,
useFakeExecutor: true,
programMemoryOverride: JSON.parse(
JSON.stringify(kclManager.programMemory)
),
programMemoryOverride: kclManager.programMemory,
})
const resultDeclaration = ast.body.find(
(a) =>

View File

@ -26,7 +26,11 @@ import {
applyConstraintEqualLength,
setEqualLengthInfo,
} from 'components/Toolbar/EqualLength'
import { addStartProfileAt, extrudeSketch } from 'lang/modifyAst'
import {
addStartProfileAt,
deleteFromSelection,
extrudeSketch,
} from 'lang/modifyAst'
import { getNodeFromPath } from '../lang/queryAst'
import {
applyConstraintEqualAngle,
@ -44,12 +48,14 @@ import {
import { Models } from '@kittycad/lib/dist/types/src'
import { ModelingCommandSchema } from 'lib/commandBarConfigs/modelingCommandConfig'
import { err, trap } from 'lib/trap'
import { DefaultPlaneStr } from 'clientSideScene/sceneEntities'
import { DefaultPlaneStr, getFaceDetails } from 'clientSideScene/sceneEntities'
import { Vector3 } from 'three'
import { quaternionFromUpNForward } from 'clientSideScene/helpers'
import { uuidv4 } from 'lib/utils'
import { Coords2d } from 'lang/std/sketch'
import { deleteSegment } from 'clientSideScene/ClientSideSceneComp'
import { executeAst } from 'useStore'
import toast from 'react-hot-toast'
export const MODELING_PERSIST_KEY = 'MODELING_PERSIST_KEY'
@ -157,6 +163,9 @@ export type ModelingMachineEvent =
type: 'Set selection'
data: SetSelections
}
| {
type: 'Delete selection'
}
| { type: 'Sketch no face' }
| { type: 'Toggle gui mode' }
| { type: 'Cancel' }
@ -273,6 +282,13 @@ export const modelingMachine = createMachine(
cond: 'Has exportable geometry',
actions: 'Engine export',
},
'Delete selection': {
target: 'idle',
cond: 'has valid selection for deletion',
actions: ['AST delete selection'],
internal: true,
},
},
entry: 'reset client scene mouse handlers',
@ -963,6 +979,42 @@ export const modelingMachine = createMachine(
editorManager.selectRange(updatedAst?.selections)
}
},
'AST delete selection': async ({ sketchDetails, selectionRanges }) => {
let ast = kclManager.ast
const getScaledFaceDetails = async (entityId: string) => {
const faceDetails = await getFaceDetails(entityId)
if (err(faceDetails)) return {}
return {
...faceDetails,
origin: {
x: faceDetails.origin.x / sceneInfra._baseUnitMultiplier,
y: faceDetails.origin.y / sceneInfra._baseUnitMultiplier,
z: faceDetails.origin.z / sceneInfra._baseUnitMultiplier,
},
}
}
const modifiedAst = await deleteFromSelection(
ast,
selectionRanges.codeBasedSelections[0],
kclManager.programMemory,
getScaledFaceDetails
)
if (err(modifiedAst)) return
const testExecute = await executeAst({
ast: modifiedAst,
useFakeExecutor: true,
engineCommandManager,
})
if (testExecute.errors.length) {
toast.error('Unable to delete part')
return
}
await kclManager.updateAst(modifiedAst, true)
},
'conditionally equip line tool': (_, { type }) => {
if (type === 'done.invoke.animate-to-face') {
sceneInfra.modelingSend('Equip Line tool')
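
The 'AST delete selection' action above is a validate-before-commit flow: build the candidate AST with deleteFromSelection, dry-run it through executeAst with the fake executor, and only call kclManager.updateAst when that run reports no errors (otherwise the user just sees the 'Unable to delete part' toast). Triggering it is a single dispatch; a hedged sketch, noting that the machine must be idle and the 'has valid selection for deletion' guard must pass:

// e.g. from a keyboard shortcut or command handler with access to sceneInfra:
sceneInfra.modelingSend({ type: 'Delete selection' })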

View File

@ -533,6 +533,7 @@ dependencies = [
"ciborium",
"clap",
"criterion-plot",
"futures",
"is-terminal",
"itertools 0.10.5",
"num-traits",
@ -545,6 +546,7 @@ dependencies = [
"serde_derive",
"serde_json",
"tinytemplate",
"tokio",
"walkdir",
]
@ -710,7 +712,7 @@ dependencies = [
[[package]]
name = "derive-docs"
version = "0.1.18"
version = "0.1.19"
dependencies = [
"Inflector",
"anyhow",
@ -1383,7 +1385,7 @@ dependencies = [
[[package]]
name = "kcl-lib"
version = "0.1.67"
version = "0.1.68"
dependencies = [
"anyhow",
"approx",
@ -3275,9 +3277,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]]
name = "ts-rs"
version = "9.0.0"
version = "9.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e2dcf58e612adda9a83800731e8e4aba04d8a302b9029617b0b6e4b021d5357"
checksum = "b44017f9f875786e543595076374b9ef7d13465a518dd93d6ccdbf5b432dde8c"
dependencies = [
"chrono",
"serde_json",
@ -3289,9 +3291,9 @@ dependencies = [
[[package]]
name = "ts-rs-macros"
version = "9.0.0"
version = "9.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbdee324e50a7402416d9c25270d3df4241ed528af5d36dda18b6f219551c577"
checksum = "c88cc88fd23b5a04528f3a8436024f20010a16ec18eb23c164b1242f65860130"
dependencies = [
"proc-macro2",
"quote",

View File

@ -1,7 +1,7 @@
[package]
name = "derive-docs"
description = "A tool for generating documentation from Rust derive macros"
version = "0.1.18"
version = "0.1.19"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"

View File

@ -761,7 +761,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -795,7 +795,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
let program = parser.ast().unwrap();
let ctx = crate::executor::ExecutorContext::new(&client, Default::default()).await.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
// Zoom to fit.
ctx.engine

View File

@ -16,7 +16,7 @@ mod test_examples_someFn {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_someFn {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_someFn {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_someFn {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_show {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_show {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),
@ -106,7 +106,7 @@ mod test_examples_show {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -135,7 +135,7 @@ mod test_examples_show {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_show {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_show {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -17,7 +17,7 @@ mod test_examples_my_func {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -47,7 +47,7 @@ mod test_examples_my_func {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),
@ -108,7 +108,7 @@ mod test_examples_my_func {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -137,7 +137,7 @@ mod test_examples_my_func {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -17,7 +17,7 @@ mod test_examples_line_to {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -47,7 +47,7 @@ mod test_examples_line_to {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),
@ -108,7 +108,7 @@ mod test_examples_line_to {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -137,7 +137,7 @@ mod test_examples_line_to {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_min {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_min {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),
@ -106,7 +106,7 @@ mod test_examples_min {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -135,7 +135,7 @@ mod test_examples_min {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_show {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_show {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_import {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_import {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_import {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_import {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_import {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_import {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -16,7 +16,7 @@ mod test_examples_show {
settings: Default::default(),
is_mock: true,
};
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
}
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@ -45,7 +45,7 @@ mod test_examples_show {
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
.await
.unwrap();
ctx.run(program, None).await.unwrap();
ctx.run(&program, None).await.unwrap();
ctx.engine
.send_modeling_cmd(
uuid::Uuid::new_v4(),

View File

@ -157,7 +157,7 @@ async fn snapshot_endpoint(body: Bytes, state: ExecutorContext) -> Response<Body
// Let users know if the test is taking a long time.
let (done_tx, done_rx) = oneshot::channel::<()>();
let timer = time_until(done_rx);
let snapshot = match state.execute_and_prepare_snapshot(program).await {
let snapshot = match state.execute_and_prepare_snapshot(&program).await {
Ok(sn) => sn,
Err(e) => return kcl_err(e),
};

View File

@ -1,7 +1,7 @@
[package]
name = "kcl-lib"
description = "KittyCAD Language implementation and tools"
version = "0.1.67"
version = "0.1.68"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
@ -19,7 +19,7 @@ chrono = "0.4.38"
clap = { version = "4.5.7", default-features = false, optional = true }
dashmap = "6.0.1"
databake = { version = "0.1.8", features = ["derive"] }
derive-docs = { version = "0.1.18", path = "../derive-docs" }
derive-docs = { version = "0.1.19", path = "../derive-docs" }
form_urlencoded = "1.2.1"
futures = { version = "0.3.30" }
git_rev = "0.1.0"
@ -28,7 +28,7 @@ kittycad = { workspace = true, features = ["clap"] }
lazy_static = "1.5.0"
mime_guess = "2.0.4"
parse-display = "0.9.1"
pyo3 = {version = "0.22.0", optional = true}
pyo3 = { version = "0.22.0", optional = true }
reqwest = { version = "0.11.26", default-features = false, features = ["stream", "rustls-tls"] }
ropey = "1.6.1"
schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1"] }
@ -37,7 +37,7 @@ serde_json = "1.0.118"
sha2 = "0.10.8"
thiserror = "1.0.61"
toml = "0.8.14"
ts-rs = { version = "9.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
ts-rs = { version = "9.0.1", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
url = { version = "2.5.2", features = ["serde"] }
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
validator = { version = "0.18.1", features = ["derive"] }
@ -67,6 +67,8 @@ cli = ["dep:clap"]
disable-println = []
engine = []
pyo3 = ["dep:pyo3"]
# Helper functions also used in benchmarks.
lsp-test-util = []
[profile.release]
panic = "abort"
@ -78,10 +80,10 @@ debug = true # Flamegraphs of benchmarks require accurate debug symbols
[dev-dependencies]
base64 = "0.22.1"
convert_case = "0.6.0"
criterion = "0.5.1"
criterion = { version = "0.5.1", features = ["async_tokio"] }
expectorate = "1.1.0"
iai = "0.1"
image = {version = "0.25.1", default-features = false, features = ["png"] }
image = { version = "0.25.1", default-features = false, features = ["png"] }
insta = { version = "1.38.0", features = ["json"] }
itertools = "0.13.0"
pretty_assertions = "1.4.0"
@ -95,3 +97,13 @@ harness = false
[[bench]]
name = "compiler_benchmark_iai"
harness = false
[[bench]]
name = "lsp_semantic_tokens_benchmark_criterion"
harness = false
required-features = ["lsp-test-util"]
[[bench]]
name = "lsp_semantic_tokens_benchmark_iai"
harness = false
required-features = ["lsp-test-util"]
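
Both new LSP benchmarks declare required-features = ["lsp-test-util"], so Cargo skips building them unless that feature is switched on; presumably they are run with something like `cargo bench --features lsp-test-util` from the kcl-lib crate directory.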

View File

@ -0,0 +1,65 @@
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use kcl_lib::lsp::test_util::kcl_lsp_server;
use tokio::runtime::Runtime;
use tower_lsp::LanguageServer;
async fn kcl_lsp_semantic_tokens(code: &str) {
let server = kcl_lsp_server(false).await.unwrap();
// Send open file.
server
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
text_document: tower_lsp::lsp_types::TextDocumentItem {
uri: "file:///test.kcl".try_into().unwrap(),
language_id: "kcl".to_string(),
version: 1,
text: code.to_string(),
},
})
.await;
// Send semantic tokens request.
black_box(
server
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
partial_result_params: Default::default(),
work_done_progress_params: Default::default(),
})
.await
.unwrap()
.unwrap(),
);
}
fn bench_kcl_lsp_semantic_tokens(c: &mut Criterion) {
for (name, code) in [
("pipes_on_pipes", PIPES_PROGRAM),
("big_kitt", KITT_PROGRAM),
("cube", CUBE_PROGRAM),
("math", MATH_PROGRAM),
("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
("global_tags", GLOBAL_TAGS_FILE),
] {
c.bench_with_input(BenchmarkId::new("semantic_tokens_", name), &code, |b, &s| {
let rt = Runtime::new().unwrap();
// Spawn a future onto the runtime
b.iter(|| {
rt.block_on(kcl_lsp_semantic_tokens(s));
});
});
}
}
criterion_group!(benches, bench_kcl_lsp_semantic_tokens);
criterion_main!(benches);
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
const CUBE_PROGRAM: &str = include_str!("../../tests/executor/inputs/cube.kcl");
const MATH_PROGRAM: &str = include_str!("../../tests/executor/inputs/math.kcl");
const MIKE_STRESS_TEST_PROGRAM: &str = include_str!("../../tests/executor/inputs/mike_stress_test.kcl");
const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");

View File

@ -0,0 +1,45 @@
use iai::black_box;
use kcl_lib::lsp::test_util::kcl_lsp_server;
use tower_lsp::LanguageServer;
async fn kcl_lsp_semantic_tokens(code: &str) {
let server = kcl_lsp_server(false).await.unwrap();
// Send open file.
server
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
text_document: tower_lsp::lsp_types::TextDocumentItem {
uri: "file:///test.kcl".try_into().unwrap(),
language_id: "kcl".to_string(),
version: 1,
text: code.to_string(),
},
})
.await;
// Send semantic tokens request.
black_box(
server
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "file:///test.kcl".try_into().unwrap(),
},
partial_result_params: Default::default(),
work_done_progress_params: Default::default(),
})
.await
.unwrap()
.unwrap(),
);
}
async fn semantic_tokens_global_tags() {
let code = GLOBAL_TAGS_FILE;
kcl_lsp_semantic_tokens(code).await;
}
iai::main! {
semantic_tokens_global_tags,
}
const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");

View File

@ -32,9 +32,6 @@ use crate::{
mod literal_value;
mod none;
/// Position-independent digest of the AST node.
pub type Digest = [u8; 32];
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
#[databake(path = kcl_lib::ast::types)]
#[ts(export)]
@ -44,8 +41,6 @@ pub struct Program {
pub end: usize,
pub body: Vec<BodyItem>,
pub non_code_meta: NonCodeMeta,
pub digest: Option<Digest>,
}
impl Program {
@ -893,8 +888,6 @@ pub struct NonCodeNode {
pub start: usize,
pub end: usize,
pub value: NonCodeValue,
pub digest: Option<Digest>,
}
impl From<NonCodeNode> for SourceRange {
@ -1028,8 +1021,6 @@ pub enum NonCodeValue {
pub struct NonCodeMeta {
pub non_code_nodes: HashMap<usize, Vec<NonCodeNode>>,
pub start: Vec<NonCodeNode>,
pub digest: Option<Digest>,
}
// implement Deserialize manually because we want to force the keys of non_code_nodes to be usize
@ -1055,7 +1046,6 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
Ok(NonCodeMeta {
non_code_nodes,
start: helper.start,
digest: None,
})
}
}
@ -1084,8 +1074,6 @@ pub struct ExpressionStatement {
pub start: usize,
pub end: usize,
pub expression: Value,
pub digest: Option<Digest>,
}
impl_value_meta!(ExpressionStatement);
@ -1100,8 +1088,6 @@ pub struct CallExpression {
pub callee: Identifier,
pub arguments: Vec<Value>,
pub optional: bool,
pub digest: Option<Digest>,
}
impl_value_meta!(CallExpression);
@ -1120,7 +1106,6 @@ impl CallExpression {
callee: Identifier::new(name),
arguments,
optional: false,
digest: None,
})
}
@ -1232,7 +1217,7 @@ impl CallExpression {
// Call the stdlib function
let p = func.function().clone().body;
let results = match ctx.inner_execute(p, &mut fn_memory, BodyType::Block).await {
let results = match ctx.inner_execute(&p, &mut fn_memory, BodyType::Block).await {
Ok(results) => results,
Err(err) => {
// We need to override the source ranges so we don't get the embedded kcl
@ -1370,8 +1355,6 @@ pub struct VariableDeclaration {
pub end: usize,
pub declarations: Vec<VariableDeclarator>,
pub kind: VariableKind, // Change to enum if there are specific values
pub digest: Option<Digest>,
}
impl From<&VariableDeclaration> for Vec<CompletionItem> {
@ -1417,7 +1400,6 @@ impl VariableDeclaration {
end: 0,
declarations,
kind,
digest: None,
}
}
@ -1640,8 +1622,6 @@ pub struct VariableDeclarator {
pub id: Identifier,
/// The value of the variable.
pub init: Value,
pub digest: Option<Digest>,
}
impl_value_meta!(VariableDeclarator);
@ -1653,7 +1633,6 @@ impl VariableDeclarator {
end: 0,
id: Identifier::new(name),
init,
digest: None,
}
}
@ -1671,8 +1650,6 @@ pub struct Literal {
pub end: usize,
pub value: LiteralValue,
pub raw: String,
pub digest: Option<Digest>,
}
impl_value_meta!(Literal);
@ -1684,7 +1661,6 @@ impl Literal {
end: 0,
raw: JValue::from(value.clone()).to_string(),
value,
digest: None,
}
}
@ -1745,8 +1721,6 @@ pub struct Identifier {
pub start: usize,
pub end: usize,
pub name: String,
pub digest: Option<Digest>,
}
impl_value_meta!(Identifier);
@ -1757,7 +1731,6 @@ impl Identifier {
start: 0,
end: 0,
name: name.to_string(),
digest: None,
}
}
@ -1786,8 +1759,6 @@ pub struct TagDeclarator {
pub end: usize,
#[serde(rename = "value")]
pub name: String,
pub digest: Option<Digest>,
}
impl_value_meta!(TagDeclarator);
@ -1847,7 +1818,6 @@ impl TagDeclarator {
start: 0,
end: 0,
name: name.to_string(),
digest: None,
}
}
@ -1910,19 +1880,13 @@ impl TagDeclarator {
pub struct PipeSubstitution {
pub start: usize,
pub end: usize,
pub digest: Option<Digest>,
}
impl_value_meta!(PipeSubstitution);
impl PipeSubstitution {
pub fn new() -> Self {
Self {
start: 0,
end: 0,
digest: None,
}
Self { start: 0, end: 0 }
}
}
@ -1946,8 +1910,6 @@ pub struct ArrayExpression {
pub start: usize,
pub end: usize,
pub elements: Vec<Value>,
pub digest: Option<Digest>,
}
impl_value_meta!(ArrayExpression);
@ -1964,7 +1926,6 @@ impl ArrayExpression {
start: 0,
end: 0,
elements,
digest: None,
}
}
@ -2105,8 +2066,6 @@ pub struct ObjectExpression {
pub start: usize,
pub end: usize,
pub properties: Vec<ObjectProperty>,
pub digest: Option<Digest>,
}
impl ObjectExpression {
@ -2115,7 +2074,6 @@ impl ObjectExpression {
start: 0,
end: 0,
properties,
digest: None,
}
}
@ -2269,8 +2227,6 @@ pub struct ObjectProperty {
pub end: usize,
pub key: Identifier,
pub value: Value,
pub digest: Option<Digest>,
}
impl_value_meta!(ObjectProperty);
@ -2399,8 +2355,6 @@ pub struct MemberExpression {
pub object: MemberObject,
pub property: LiteralIdentifier,
pub computed: bool,
pub digest: Option<Digest>,
}
impl_value_meta!(MemberExpression);
@ -2566,8 +2520,6 @@ pub struct BinaryExpression {
pub operator: BinaryOperator,
pub left: BinaryPart,
pub right: BinaryPart,
pub digest: Option<Digest>,
}
impl_value_meta!(BinaryExpression);
@ -2580,7 +2532,6 @@ impl BinaryExpression {
operator,
left,
right,
digest: None,
}
}
@ -2805,8 +2756,6 @@ pub struct UnaryExpression {
pub end: usize,
pub operator: UnaryOperator,
pub argument: BinaryPart,
pub digest: Option<Digest>,
}
impl_value_meta!(UnaryExpression);
@ -2818,7 +2767,6 @@ impl UnaryExpression {
end: argument.end(),
operator,
argument,
digest: None,
}
}
@ -2909,8 +2857,6 @@ pub struct PipeExpression {
// The rest will be CallExpression, and the AST type should reflect this.
pub body: Vec<Value>,
pub non_code_meta: NonCodeMeta,
pub digest: Option<Digest>,
}
impl_value_meta!(PipeExpression);
@ -2928,7 +2874,6 @@ impl PipeExpression {
end: 0,
body,
non_code_meta: Default::default(),
digest: None,
}
}
@ -3125,8 +3070,6 @@ pub struct Parameter {
pub type_: Option<FnArgType>,
/// Is the parameter optional?
pub optional: bool,
pub digest: Option<Digest>,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
@ -3140,8 +3083,6 @@ pub struct FunctionExpression {
pub body: Program,
#[serde(skip)]
pub return_type: Option<FnArgType>,
pub digest: Option<Digest>,
}
impl_value_meta!(FunctionExpression);
@ -3177,7 +3118,6 @@ impl FunctionExpression {
end,
params,
body,
digest: _,
return_type: _,
} = self;
let mut params_required = Vec::with_capacity(params.len());
@ -3258,8 +3198,6 @@ pub struct ReturnStatement {
pub start: usize,
pub end: usize,
pub argument: Value,
pub digest: Option<Digest>,
}
impl_value_meta!(ReturnStatement);
@ -4993,34 +4931,28 @@ const firstExtrude = startSketchOn('XY')
identifier: Identifier {
start: 35,
end: 40,
name: "thing".to_owned(),
digest: None,
name: "thing".to_owned()
},
type_: Some(FnArgType::Primitive(FnArgPrimitive::Number)),
optional: false,
digest: None
optional: false
},
Parameter {
identifier: Identifier {
start: 50,
end: 56,
name: "things".to_owned(),
digest: None,
name: "things".to_owned()
},
type_: Some(FnArgType::Array(FnArgPrimitive::String)),
optional: false,
digest: None
optional: false
},
Parameter {
identifier: Identifier {
start: 68,
end: 72,
name: "more".to_owned(),
digest: None
name: "more".to_owned()
},
type_: Some(FnArgType::Primitive(FnArgPrimitive::String)),
optional: true,
digest: None
optional: true
}
]
})
@ -5055,34 +4987,28 @@ const firstExtrude = startSketchOn('XY')
identifier: Identifier {
start: 18,
end: 23,
name: "thing".to_owned(),
digest: None
name: "thing".to_owned()
},
type_: Some(FnArgType::Primitive(FnArgPrimitive::Number)),
optional: false,
digest: None
optional: false
},
Parameter {
identifier: Identifier {
start: 33,
end: 39,
name: "things".to_owned(),
digest: None
name: "things".to_owned()
},
type_: Some(FnArgType::Array(FnArgPrimitive::String)),
optional: false,
digest: None
optional: false
},
Parameter {
identifier: Identifier {
start: 51,
end: 55,
name: "more".to_owned(),
digest: None
name: "more".to_owned()
},
type_: Some(FnArgType::Primitive(FnArgPrimitive::String)),
optional: true,
digest: None
optional: true
}
]
})
@ -5175,10 +5101,8 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
end: 0,
body: Vec::new(),
non_code_meta: Default::default(),
digest: None,
},
return_type: None,
digest: None,
},
),
(
@ -5192,21 +5116,17 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
start: 0,
end: 0,
name: "foo".to_owned(),
digest: None,
},
type_: None,
optional: false,
digest: None,
}],
body: Program {
start: 0,
end: 0,
body: Vec::new(),
non_code_meta: Default::default(),
digest: None,
},
return_type: None,
digest: None,
},
),
(
@ -5220,21 +5140,17 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
start: 0,
end: 0,
name: "foo".to_owned(),
digest: None,
},
type_: None,
optional: true,
digest: None,
}],
body: Program {
start: 0,
end: 0,
body: Vec::new(),
non_code_meta: Default::default(),
digest: None,
},
return_type: None,
digest: None,
},
),
(
@ -5249,22 +5165,18 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
start: 0,
end: 0,
name: "foo".to_owned(),
digest: None,
},
type_: None,
optional: false,
digest: None,
},
Parameter {
identifier: Identifier {
start: 0,
end: 0,
name: "bar".to_owned(),
digest: None,
},
type_: None,
optional: true,
digest: None,
},
],
body: Program {
@ -5272,10 +5184,8 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
end: 0,
body: Vec::new(),
non_code_meta: Default::default(),
digest: None,
},
return_type: None,
digest: None,
},
),
]
@ -5300,7 +5210,6 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
expression,
start: _,
end: _,
digest: None,
}) = program.body.first().unwrap()
else {
panic!("expected a function!");

View File

@ -828,7 +828,7 @@ mod tests {
assert_eq!(
some_function,
crate::ast::types::Function::StdLib {
func: Box::new(crate::std::sketch::Line),
func: Box::new(crate::std::sketch::Line)
}
);
}

View File

@ -110,6 +110,8 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
}
/// Send the modeling cmd and wait for the response.
// TODO: This should only borrow `cmd`.
// See https://github.com/KittyCAD/modeling-app/issues/2821
async fn send_modeling_cmd(
&self,
id: uuid::Uuid,

View File

@ -142,7 +142,7 @@ impl IntoDiagnostic for KclError {
Diagnostic {
range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
severity: Some(DiagnosticSeverity::ERROR),
severity: Some(self.severity()),
code: None,
// TODO: this is neat we can pass a URL to a help page here for this specific error.
code_description: None,
@ -153,6 +153,10 @@ impl IntoDiagnostic for KclError {
data: None,
}
}
fn severity(&self) -> DiagnosticSeverity {
DiagnosticSeverity::ERROR
}
}
/// This is different than to_string() in that it will serialize the Error

View File

@ -16,7 +16,7 @@ use crate::{
errors::{KclError, KclErrorDetails},
fs::FileManager,
settings::types::UnitLength,
std::{FunctionKind, StdLib},
std::{FnAsArg, FunctionKind, StdLib},
};
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
@ -640,6 +640,52 @@ impl MemoryItem {
.map(Some)
}
fn as_user_val(&self) -> Option<&UserVal> {
if let MemoryItem::UserVal(x) = self {
Some(x)
} else {
None
}
}
/// If this value is of type u32, return it.
pub fn get_u32(&self, source_ranges: Vec<SourceRange>) -> Result<u32, KclError> {
let err = KclError::Semantic(KclErrorDetails {
message: "Expected an integer >= 0".to_owned(),
source_ranges,
});
self.as_user_val()
.and_then(|uv| uv.value.as_number())
.and_then(|n| n.as_u64())
.and_then(|n| u32::try_from(n).ok())
.ok_or(err)
}
/// If this value is of type function, return it.
pub fn get_function(&self, source_ranges: Vec<SourceRange>) -> Result<FnAsArg<'_>, KclError> {
let MemoryItem::Function {
func,
expression,
meta: _,
} = &self
else {
return Err(KclError::Semantic(KclErrorDetails {
message: "not an in-memory function".to_string(),
source_ranges,
}));
};
let func = func.as_ref().ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: format!("Not an in-memory function: {:?}", expression),
source_ranges,
})
})?;
Ok(FnAsArg {
func,
expr: expression.to_owned(),
})
}
/// Backwards compatibility for getting a tag from a memory item.
pub fn get_tag_identifier(&self) -> Result<TagIdentifier, KclError> {
match self {
@ -668,7 +714,6 @@ impl MemoryItem {
name,
start: u.meta[0].source_range.start(),
end: u.meta[0].source_range.end(),
digest: None,
})
}
_ => Err(KclError::Semantic(KclErrorDetails {
@ -688,7 +733,6 @@ impl MemoryItem {
name,
start: u.meta[0].source_range.start(),
end: u.meta[0].source_range.end(),
digest: None,
}))
} else {
Ok(None)
@ -1457,7 +1501,7 @@ impl ExecutorContext {
/// Kurt uses this for partial execution.
pub async fn run(
&self,
program: crate::ast::types::Program,
program: &crate::ast::types::Program,
memory: Option<ProgramMemory>,
) -> Result<ProgramMemory, KclError> {
// Before we even start executing the program, set the units.
@ -1483,7 +1527,7 @@ impl ExecutorContext {
#[async_recursion]
pub(crate) async fn inner_execute(
&self,
program: crate::ast::types::Program,
program: &crate::ast::types::Program,
memory: &mut ProgramMemory,
body_type: BodyType,
) -> Result<ProgramMemory, KclError> {
@ -1515,9 +1559,7 @@ impl ExecutorContext {
}
FunctionKind::Std(func) => {
let mut newmem = memory.clone();
let result = self
.inner_execute(func.program().to_owned(), &mut newmem, BodyType::Block)
.await?;
let result = self.inner_execute(func.program(), &mut newmem, BodyType::Block).await?;
memory.return_ = result.return_;
}
FunctionKind::UserDefined => {
@ -1653,7 +1695,7 @@ impl ExecutorContext {
let mut fn_memory = assign_args_to_params(&function_expression, args, memory.clone())?;
let result = ctx
.inner_execute(function_expression.body.clone(), &mut fn_memory, BodyType::Block)
.inner_execute(&function_expression.body, &mut fn_memory, BodyType::Block)
.await?;
Ok((result.return_, fn_memory.get_tags()))
@ -1703,7 +1745,7 @@ impl ExecutorContext {
}
/// Execute the program, then get a PNG screenshot.
pub async fn execute_and_prepare_snapshot(&self, program: Program) -> Result<kittycad::types::TakeSnapshot> {
pub async fn execute_and_prepare_snapshot(&self, program: &Program) -> Result<kittycad::types::TakeSnapshot> {
let _ = self.run(program, None).await?;
// Zoom to fit.
@ -1820,7 +1862,7 @@ mod tests {
settings: Default::default(),
is_mock: true,
};
let memory = ctx.run(program, None).await?;
let memory = ctx.run(&program, None).await?;
Ok(memory)
}
@ -2339,7 +2381,6 @@ const bracket = startSketchOn('XY')
start: 0,
end: 0,
name: s.to_owned(),
digest: None,
}
}
fn opt_param(s: &'static str) -> Parameter {
@ -2347,7 +2388,6 @@ const bracket = startSketchOn('XY')
identifier: ident(s),
type_: None,
optional: true,
digest: None,
}
}
fn req_param(s: &'static str) -> Parameter {
@ -2355,7 +2395,6 @@ const bracket = startSketchOn('XY')
identifier: ident(s),
type_: None,
optional: false,
digest: None,
}
}
fn additional_program_memory(items: &[(String, MemoryItem)]) -> ProgramMemory {
@ -2439,10 +2478,8 @@ const bracket = startSketchOn('XY')
end: 0,
body: Vec::new(),
non_code_meta: Default::default(),
digest: None,
},
return_type: None,
digest: None,
};
let actual = assign_args_to_params(func_expr, args, ProgramMemory::new());
assert_eq!(

View File

@ -0,0 +1,45 @@
use std::collections::HashMap;
use schemars::JsonSchema;
use crate::{
ast::types::FunctionExpression,
errors::KclError,
executor::{ExecutorContext, MemoryFunction, MemoryItem, Metadata, ProgramMemory, ProgramReturn},
};
/// A function being used as a parameter into a stdlib function.
pub struct FunctionParam<'a> {
pub inner: &'a MemoryFunction,
pub memory: ProgramMemory,
pub fn_expr: Box<FunctionExpression>,
pub meta: Vec<Metadata>,
pub ctx: ExecutorContext,
}
impl<'a> FunctionParam<'a> {
pub async fn call(
&self,
args: Vec<MemoryItem>,
) -> Result<(Option<ProgramReturn>, HashMap<String, MemoryItem>), KclError> {
(self.inner)(
args,
self.memory.clone(),
self.fn_expr.clone(),
self.meta.clone(),
self.ctx.clone(),
)
.await
}
}
impl<'a> JsonSchema for FunctionParam<'a> {
fn schema_name() -> String {
"FunctionParam".to_owned()
}
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
// TODO: Actually generate a reasonable schema.
gen.subschema_for::<()>()
}
}

View File

@ -20,6 +20,7 @@ pub mod engine;
pub mod errors;
pub mod executor;
pub mod fs;
mod function_param;
pub mod lint;
pub mod lsp;
pub mod parser;

View File

@ -65,7 +65,11 @@ mod tests {
assert_finding!(lint_variables, Z0001, "const thicc_nes = 0.5");
}
test_finding!(z0001_full_bad, lint_variables, Z0001, "\
test_finding!(
z0001_full_bad,
lint_variables,
Z0001,
"\
// Define constants
const pipeLength = 40
const pipeSmallDia = 10
@ -94,9 +98,14 @@ const Part001 = startSketchOn('XY')
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|> close(%)
|> revolve({ axis: 'y' }, %)
");
"
);
test_no_finding!(z0001_full_good, lint_variables, Z0001, "\
test_no_finding!(
z0001_full_good,
lint_variables,
Z0001,
"\
// Define constants
const pipeLength = 40
const pipeSmallDia = 10
@ -125,5 +134,6 @@ const part001 = startSketchOn('XY')
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|> close(%)
|> revolve({ axis: 'y' }, %)
");
"
);
}

View File

@ -70,6 +70,10 @@ impl IntoDiagnostic for Discovered {
fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
(&self).to_lsp_diagnostic(code)
}
fn severity(&self) -> DiagnosticSeverity {
(&self).severity()
}
}
impl IntoDiagnostic for &Discovered {
@ -79,7 +83,7 @@ impl IntoDiagnostic for &Discovered {
Diagnostic {
range: source_range.to_lsp_range(code),
severity: Some(DiagnosticSeverity::INFORMATION),
severity: Some(self.severity()),
code: None,
// TODO: this is neat we can pass a URL to a help page here for this specific error.
code_description: None,
@ -90,6 +94,10 @@ impl IntoDiagnostic for &Discovered {
data: None,
}
}
fn severity(&self) -> DiagnosticSeverity {
DiagnosticSeverity::INFORMATION
}
}
/// Abstract lint problem type.

View File

@ -3,59 +3,15 @@
use std::sync::Arc;
use anyhow::Result;
use tokio::sync::RwLock;
use dashmap::DashMap;
use tower_lsp::lsp_types::{
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializedParams, MessageType,
RenameFilesParams, TextDocumentItem, WorkspaceFolder,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializedParams, MessageType, RenameFilesParams,
TextDocumentItem, WorkspaceFolder,
};
use crate::{
fs::FileSystem,
lsp::safemap::SafeMap,
thread::{JoinHandle, Thread},
};
#[derive(Clone)]
pub struct InnerHandle(Arc<JoinHandle>);
impl InnerHandle {
pub fn new(handle: JoinHandle) -> Self {
Self(Arc::new(handle))
}
pub fn is_finished(&self) -> bool {
self.0.is_finished()
}
pub fn cancel(&self) {
self.0.abort();
}
}
#[derive(Clone)]
pub struct UpdateHandle(Arc<RwLock<Option<InnerHandle>>>);
impl UpdateHandle {
pub fn new(handle: InnerHandle) -> Self {
Self(Arc::new(RwLock::new(Some(handle))))
}
pub async fn read(&self) -> Option<InnerHandle> {
self.0.read().await.clone()
}
pub async fn write(&self, handle: Option<InnerHandle>) {
*self.0.write().await = handle;
}
}
impl Default for UpdateHandle {
fn default() -> Self {
Self(Arc::new(RwLock::new(None)))
}
}
use crate::fs::FileSystem;
/// A trait for the backend of the language server.
#[async_trait::async_trait]
@ -63,18 +19,14 @@ pub trait Backend: Clone + Send + Sync
where
Self: 'static,
{
fn client(&self) -> tower_lsp::Client;
fn client(&self) -> &tower_lsp::Client;
fn fs(&self) -> Arc<crate::fs::FileManager>;
fn fs(&self) -> &Arc<crate::fs::FileManager>;
async fn is_initialized(&self) -> bool;
async fn set_is_initialized(&self, is_initialized: bool);
async fn current_handle(&self) -> Option<InnerHandle>;
async fn set_current_handle(&self, handle: Option<InnerHandle>);
async fn workspace_folders(&self) -> Vec<WorkspaceFolder>;
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
@ -82,7 +34,7 @@ where
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
/// Get the current code map.
fn code_map(&self) -> SafeMap<String, Vec<u8>>;
fn code_map(&self) -> &DashMap<String, Vec<u8>>;
/// Insert a new code map.
async fn insert_code_map(&self, uri: String, text: Vec<u8>);
@ -94,62 +46,36 @@ where
async fn clear_code_state(&self);
/// Get the current diagnostics map.
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport>;
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>>;
/// On change event.
async fn inner_on_change(&self, params: TextDocumentItem, force: bool);
/// Check if the file has diagnostics.
async fn has_diagnostics(&self, uri: &str) -> bool {
if let Some(tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics)) =
self.current_diagnostics_map().get(uri).await
{
!diagnostics.full_document_diagnostic_report.items.is_empty()
} else {
false
}
let Some(diagnostics) = self.current_diagnostics_map().get(uri) else {
return false;
};
!diagnostics.is_empty()
}
async fn on_change(&self, params: TextDocumentItem) {
// Check if the document is in the current code map and if it is the same as what we have
// stored.
let filename = params.uri.to_string();
if let Some(current_code) = self.code_map().get(&filename).await {
if current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
if let Some(current_code) = self.code_map().get(&filename) {
if *current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
return;
}
}
// Check if we already have a handle running.
if let Some(current_handle) = self.current_handle().await {
self.set_current_handle(None).await;
// Drop that handle to cancel it.
current_handle.cancel();
}
println!("on_change after check: {:?}", params);
let cloned = self.clone();
let task = JoinHandle::new(async move {
cloned
.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
.await;
cloned.inner_on_change(params, false).await;
cloned.set_current_handle(None).await;
});
let update_handle = InnerHandle::new(task);
// Set our new handle.
self.set_current_handle(Some(update_handle.clone())).await;
}
async fn wait_on_handle(&self) {
while let Some(handle) = self.current_handle().await {
if !handle.is_finished() {
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
} else {
break;
}
}
self.set_current_handle(None).await;
self.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
.await;
println!("on_change after insert: {:?}", params);
self.inner_on_change(params, false).await;
}
async fn update_from_disk<P: AsRef<std::path::Path> + std::marker::Send>(&self, path: P) -> Result<()> {
@ -211,7 +137,7 @@ where
self.remove_workspace_folders(params.event.removed).await;
// Remove the code from the current code map.
// We do this since it means the user is changing projects so let's refresh the state.
if !self.code_map().is_empty().await && should_clear {
if !self.code_map().is_empty() && should_clear {
self.clear_code_state().await;
}
for added in params.event.added {

View File

@ -9,28 +9,27 @@ use std::{
sync::{Arc, RwLock},
};
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use tower_lsp::{
jsonrpc::{Error, Result},
lsp_types::{
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializeParams,
InitializeResult, InitializedParams, MessageType, OneOf, RenameFilesParams, ServerCapabilities,
TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializeResult, InitializedParams,
MessageType, OneOf, RenameFilesParams, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
WorkspaceServerCapabilities,
},
LanguageServer,
};
use super::backend::{InnerHandle, UpdateHandle};
use crate::lsp::{
backend::Backend as _,
copilot::types::{
CopilotAcceptCompletionParams, CopilotCompletionResponse, CopilotCompletionTelemetry, CopilotEditorInfo,
CopilotLspCompletionParams, CopilotRejectCompletionParams, DocParams,
},
safemap::SafeMap,
};
#[derive(Deserialize, Serialize, Debug)]
@ -50,9 +49,9 @@ pub struct Backend {
/// The file system client to use.
pub fs: Arc<crate::fs::FileManager>,
/// The workspace folders.
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
pub workspace_folders: DashMap<String, WorkspaceFolder>,
/// Current code.
pub code_map: SafeMap<String, Vec<u8>>,
pub code_map: DashMap<String, Vec<u8>>,
/// The Zoo API client.
pub zoo_client: kittycad::Client,
/// The editor info is used to store information about the editor.
@ -60,21 +59,22 @@ pub struct Backend {
/// The cache is used to store the results of previous requests.
pub cache: Arc<cache::CopilotCache>,
/// Storage so we can send telemetry data back out.
pub telemetry: SafeMap<uuid::Uuid, CopilotCompletionTelemetry>,
pub telemetry: DashMap<uuid::Uuid, CopilotCompletionTelemetry>,
/// Diagnostics.
pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
pub current_handle: UpdateHandle,
}
// Implement the shared backend trait for the language server.
#[async_trait::async_trait]
impl crate::lsp::backend::Backend for Backend {
fn client(&self) -> tower_lsp::Client {
self.client.clone()
fn client(&self) -> &tower_lsp::Client {
&self.client
}
fn fs(&self) -> Arc<crate::fs::FileManager> {
self.fs.clone()
fn fs(&self) -> &Arc<crate::fs::FileManager> {
&self.fs
}
async fn is_initialized(&self) -> bool {
@ -85,48 +85,41 @@ impl crate::lsp::backend::Backend for Backend {
*self.is_initialized.write().await = is_initialized;
}
async fn current_handle(&self) -> Option<InnerHandle> {
self.current_handle.read().await
}
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
self.current_handle.write(handle).await;
}
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
self.workspace_folders.inner().await.values().cloned().collect()
// TODO: fix clone
self.workspace_folders.iter().map(|i| i.clone()).collect()
}
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.insert(folder.name.to_string(), folder).await;
self.workspace_folders.insert(folder.name.to_string(), folder);
}
}
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.remove(&folder.name).await;
self.workspace_folders.remove(&folder.name);
}
}
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
self.code_map.clone()
fn code_map(&self) -> &DashMap<String, Vec<u8>> {
&self.code_map
}
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
self.code_map.insert(uri, text).await;
self.code_map.insert(uri, text);
}
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
self.code_map.remove(&uri).await
self.code_map.remove(&uri).map(|(_, v)| v)
}
async fn clear_code_state(&self) {
self.code_map.clear().await;
self.code_map.clear();
}
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
Default::default()
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
&self.diagnostics_map
}
async fn inner_on_change(&self, _params: TextDocumentItem, _force: bool) {
@ -138,8 +131,15 @@ impl Backend {
/// Get completions from the kittycad api.
pub async fn get_completions(&self, language: String, prompt: String, suffix: String) -> Result<Vec<String>> {
let body = kittycad::types::KclCodeCompletionRequest {
prompt: Some(prompt.clone()),
suffix: Some(suffix.clone()),
extra: Some(kittycad::types::KclCodeCompletionParams {
language: Some(language.to_string()),
next_indent: None,
trim_by_indentation: true,
prompt_tokens: Some(prompt.len() as u32),
suffix_tokens: Some(suffix.len() as u32),
}),
prompt: Some(prompt),
suffix: Some(suffix),
max_tokens: Some(500),
temperature: Some(1.0),
top_p: Some(1.0),
@ -149,13 +149,6 @@ impl Backend {
nwo: None,
// We haven't implemented streaming yet.
stream: false,
extra: Some(kittycad::types::KclCodeCompletionParams {
language: Some(language.to_string()),
next_indent: None,
trim_by_indentation: true,
prompt_tokens: Some(prompt.len() as u32),
suffix_tokens: Some(suffix.len() as u32),
}),
};
let resp = self
@ -234,7 +227,7 @@ impl Backend {
completion: completion.clone(),
params: params.clone(),
};
self.telemetry.insert(completion.uuid, telemetry).await;
self.telemetry.insert(completion.uuid, telemetry);
}
self.cache
.set_cached_result(&doc_params.uri, &doc_params.pos.line, &response);
@ -248,7 +241,7 @@ impl Backend {
.await;
// Get the original telemetry data.
let Some(original) = self.telemetry.remove(&params.uuid).await else {
let Some(original) = self.telemetry.remove(&params.uuid) else {
return;
};
@ -267,7 +260,7 @@ impl Backend {
// Get the original telemetry data.
let mut originals: Vec<CopilotCompletionTelemetry> = Default::default();
for uuid in params.uuids {
if let Some(original) = self.telemetry.remove(&uuid).await {
if let Some(original) = self.telemetry.remove(&uuid).map(|(_, v)| v) {
originals.push(original);
}
}
@ -340,7 +333,7 @@ impl LanguageServer for Backend {
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
self.do_did_change(params.clone()).await;
self.do_did_change(params).await;
}
async fn did_save(&self, params: DidSaveTextDocumentParams) {

View File

@ -14,12 +14,13 @@ pub mod custom_notifications;
use anyhow::Result;
#[cfg(feature = "cli")]
use clap::Parser;
use dashmap::DashMap;
use sha2::Digest;
use tower_lsp::{
jsonrpc::Result as RpcResult,
lsp_types::{
CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, CreateFilesParams,
DeleteFilesParams, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
DeleteFilesParams, Diagnostic, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
DidSaveTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
@ -43,11 +44,7 @@ use crate::lint::checks;
use crate::{
ast::types::{Value, VariableKind},
executor::SourceRange,
lsp::{
backend::{Backend as _, InnerHandle, UpdateHandle},
safemap::SafeMap,
util::IntoDiagnostic,
},
lsp::{backend::Backend as _, util::IntoDiagnostic},
parser::PIPE_OPERATOR,
token::TokenType,
};
@ -68,6 +65,9 @@ lazy_static::lazy_static! {
vec![
SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::DEFINITION,
SemanticTokenModifier::DEFAULT_LIBRARY,
SemanticTokenModifier::READONLY,
SemanticTokenModifier::STATIC,
]
};
}
@ -93,25 +93,25 @@ pub struct Backend {
/// The file system client to use.
pub fs: Arc<crate::fs::FileManager>,
/// The workspace folders.
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
pub workspace_folders: DashMap<String, WorkspaceFolder>,
/// The stdlib completions for the language.
pub stdlib_completions: HashMap<String, CompletionItem>,
/// The stdlib signatures for the language.
pub stdlib_signatures: HashMap<String, SignatureHelp>,
/// Token maps.
pub token_map: SafeMap<String, Vec<crate::token::Token>>,
pub token_map: DashMap<String, Vec<crate::token::Token>>,
/// AST maps.
pub ast_map: SafeMap<String, crate::ast::types::Program>,
pub ast_map: DashMap<String, crate::ast::types::Program>,
/// Memory maps.
pub memory_map: SafeMap<String, crate::executor::ProgramMemory>,
pub memory_map: DashMap<String, crate::executor::ProgramMemory>,
/// Current code.
pub code_map: SafeMap<String, Vec<u8>>,
pub code_map: DashMap<String, Vec<u8>>,
/// Diagnostics.
pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,
pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
/// Symbols map.
pub symbols_map: SafeMap<String, Vec<DocumentSymbol>>,
pub symbols_map: DashMap<String, Vec<DocumentSymbol>>,
/// Semantic tokens map.
pub semantic_tokens_map: SafeMap<String, Vec<SemanticToken>>,
pub semantic_tokens_map: DashMap<String, Vec<SemanticToken>>,
/// The Zoo API client.
pub zoo_client: kittycad::Client,
/// If we can send telemetry for this user.
@ -122,18 +122,17 @@ pub struct Backend {
pub can_execute: Arc<RwLock<bool>>,
pub is_initialized: Arc<RwLock<bool>>,
pub current_handle: UpdateHandle,
}
// Implement the shared backend trait for the language server.
#[async_trait::async_trait]
impl crate::lsp::backend::Backend for Backend {
fn client(&self) -> Client {
self.client.clone()
fn client(&self) -> &Client {
&self.client
}
fn fs(&self) -> Arc<crate::fs::FileManager> {
self.fs.clone()
fn fs(&self) -> &Arc<crate::fs::FileManager> {
&self.fs
}
async fn is_initialized(&self) -> bool {
@ -144,84 +143,76 @@ impl crate::lsp::backend::Backend for Backend {
*self.is_initialized.write().await = is_initialized;
}
async fn current_handle(&self) -> Option<InnerHandle> {
self.current_handle.read().await
}
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
self.current_handle.write(handle).await;
}
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
self.workspace_folders.inner().await.values().cloned().collect()
// TODO: fix clone
self.workspace_folders.iter().map(|i| i.clone()).collect()
}
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.insert(folder.name.to_string(), folder).await;
self.workspace_folders.insert(folder.name.to_string(), folder);
}
}
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.remove(&folder.name).await;
self.workspace_folders.remove(&folder.name);
}
}
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
self.code_map.clone()
fn code_map(&self) -> &DashMap<String, Vec<u8>> {
&self.code_map
}
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
self.code_map.insert(uri, text).await;
self.code_map.insert(uri, text);
}
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
self.code_map.remove(&uri).await
self.code_map.remove(&uri).map(|x| x.1)
}
async fn clear_code_state(&self) {
self.code_map.clear().await;
self.token_map.clear().await;
self.ast_map.clear().await;
self.diagnostics_map.clear().await;
self.symbols_map.clear().await;
self.semantic_tokens_map.clear().await;
self.code_map.clear();
self.token_map.clear();
self.ast_map.clear();
self.diagnostics_map.clear();
self.symbols_map.clear();
self.semantic_tokens_map.clear();
}
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
self.diagnostics_map.clone()
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
&self.diagnostics_map
}
async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
let filename = params.uri.to_string();
// We already updated the code map in the shared backend.
// Let's update the tokens.
let tokens = match crate::token::lexer(&params.text) {
Ok(tokens) => tokens,
Err(err) => {
self.add_to_diagnostics(&params, err, true).await;
self.token_map.remove(&params.uri.to_string()).await;
self.ast_map.remove(&params.uri.to_string()).await;
self.symbols_map.remove(&params.uri.to_string()).await;
self.semantic_tokens_map.remove(&params.uri.to_string()).await;
self.memory_map.remove(&params.uri.to_string()).await;
self.add_to_diagnostics(&params, &[err], true).await;
self.token_map.remove(&filename);
self.ast_map.remove(&filename);
self.symbols_map.remove(&filename);
self.semantic_tokens_map.remove(&filename);
self.memory_map.remove(&filename);
return;
}
};
// Get the previous tokens.
let previous_tokens = self.token_map.get(&params.uri.to_string()).await;
// Try to get the memory for the current code.
let has_memory = if let Some(memory) = self.memory_map.get(&params.uri.to_string()).await {
memory != crate::executor::ProgramMemory::default()
let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
*memory != crate::executor::ProgramMemory::default()
} else {
false
};
let tokens_changed = if let Some(previous_tokens) = previous_tokens.clone() {
previous_tokens != tokens
// Get the previous tokens.
let tokens_changed = if let Some(previous_tokens) = self.token_map.get(&filename) {
*previous_tokens != tokens
} else {
true
};
@ -234,9 +225,9 @@ impl crate::lsp::backend::Backend for Backend {
if tokens_changed {
// Update our token map.
self.token_map.insert(params.uri.to_string(), tokens.clone()).await;
self.token_map.insert(params.uri.to_string(), tokens.clone());
// Update our semantic tokens.
self.update_semantic_tokens(tokens.clone(), &params).await;
self.update_semantic_tokens(&tokens, &params).await;
}
// Let's update the ast.
@ -245,19 +236,19 @@ impl crate::lsp::backend::Backend for Backend {
let ast = match result {
Ok(ast) => ast,
Err(err) => {
self.add_to_diagnostics(&params, err, true).await;
self.ast_map.remove(&params.uri.to_string()).await;
self.symbols_map.remove(&params.uri.to_string()).await;
self.memory_map.remove(&params.uri.to_string()).await;
self.add_to_diagnostics(&params, &[err], true).await;
self.ast_map.remove(&filename);
self.symbols_map.remove(&filename);
self.memory_map.remove(&filename);
return;
}
};
// Check if the ast changed.
let ast_changed = match self.ast_map.get(&params.uri.to_string()).await {
let ast_changed = match self.ast_map.get(&filename) {
Some(old_ast) => {
// Check if the ast changed.
old_ast != ast
*old_ast != ast
}
None => true,
};
@ -268,17 +259,15 @@ impl crate::lsp::backend::Backend for Backend {
}
if ast_changed {
self.ast_map.insert(params.uri.to_string(), ast.clone()).await;
self.ast_map.insert(params.uri.to_string(), ast.clone());
// Update the symbols map.
self.symbols_map
.insert(
params.uri.to_string(),
ast.get_lsp_symbols(&params.text).unwrap_or_default(),
)
.await;
self.symbols_map.insert(
params.uri.to_string(),
ast.get_lsp_symbols(&params.text).unwrap_or_default(),
);
// Update our semantic tokens.
self.update_semantic_tokens(tokens, &params).await;
self.update_semantic_tokens(&tokens, &params).await;
#[cfg(not(target_arch = "wasm32"))]
{
@ -287,12 +276,7 @@ impl crate::lsp::backend::Backend for Backend {
.into_iter()
.flatten()
.collect::<Vec<_>>();
// Clear the lints before we lint.
self.clear_diagnostics_map(&params.uri, Some(DiagnosticSeverity::INFORMATION))
.await;
for discovered_finding in &discovered_findings {
self.add_to_diagnostics(&params, discovered_finding, false).await;
}
self.add_to_diagnostics(&params, &discovered_findings, false).await;
}
}
@ -308,7 +292,7 @@ impl crate::lsp::backend::Backend for Backend {
// Execute the code if we have an executor context.
// This function automatically executes if we should & updates the diagnostics if we got
// errors.
if self.execute(&params, ast.clone()).await.is_err() {
if self.execute(&params, &ast).await.is_err() {
return;
}
@ -323,35 +307,22 @@ impl Backend {
*self.can_execute.read().await
}
async fn set_can_execute(&self, can_execute: bool) {
*self.can_execute.write().await = can_execute;
pub async fn executor_ctx(&self) -> tokio::sync::RwLockReadGuard<'_, Option<crate::executor::ExecutorContext>> {
self.executor_ctx.read().await
}
pub async fn executor_ctx(&self) -> Option<crate::executor::ExecutorContext> {
self.executor_ctx.read().await.clone()
}
async fn set_executor_ctx(&self, executor_ctx: crate::executor::ExecutorContext) {
*self.executor_ctx.write().await = Some(executor_ctx);
}
async fn update_semantic_tokens(&self, tokens: Vec<crate::token::Token>, params: &TextDocumentItem) {
async fn update_semantic_tokens(&self, tokens: &[crate::token::Token], params: &TextDocumentItem) {
// Update the semantic tokens map.
let mut semantic_tokens = vec![];
let mut last_position = Position::new(0, 0);
for token in &tokens {
let Ok(mut token_type) = SemanticTokenType::try_from(token.token_type) else {
for token in tokens {
let Ok(token_type) = SemanticTokenType::try_from(token.token_type) else {
// We continue here because not all tokens can be converted this way, we will get
// the rest from the ast.
continue;
};
if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
// This is a stdlib function.
token_type = SemanticTokenType::FUNCTION;
}
let mut token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
let mut token_type_index = match self.get_semantic_token_type_index(&token_type) {
Some(index) => index,
// This is actually bad; this should not fail.
// The test for listing all semantic token types should make this never happen.
@ -366,12 +337,12 @@ impl Backend {
}
};
let source_range: SourceRange = token.clone().into();
let source_range: SourceRange = token.into();
let position = source_range.start_to_lsp_position(&params.text);
// Calculate the token modifiers.
// Get the value at the current position.
let token_modifiers_bitset: u32 = if let Some(ast) = self.ast_map.get(&params.uri.to_string()).await {
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(params.uri.as_str()) {
let token_index = Arc::new(Mutex::new(token_type_index));
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
crate::walk::walk(&ast, &|node: crate::walk::Node| {
@ -380,7 +351,7 @@ impl Backend {
return Ok(true);
}
let get_modifier = |modifier: SemanticTokenModifier| -> Result<bool> {
let get_modifier = |modifier: Vec<SemanticTokenModifier>| -> Result<bool> {
let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
return Ok(true);
@ -395,35 +366,41 @@ impl Backend {
match node {
crate::walk::Node::TagDeclarator(_) => {
return get_modifier(SemanticTokenModifier::DEFINITION);
return get_modifier(vec![
SemanticTokenModifier::DEFINITION,
SemanticTokenModifier::STATIC,
]);
}
crate::walk::Node::VariableDeclarator(variable) => {
let sr: SourceRange = variable.id.clone().into();
let sr: SourceRange = (&variable.id).into();
if sr.contains(source_range.start()) {
if let Value::FunctionExpression(_) = &variable.init {
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
Some(index) => index,
None => token_type_index,
};
}
return get_modifier(SemanticTokenModifier::DECLARATION);
return get_modifier(vec![
SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::READONLY,
]);
}
}
crate::walk::Node::Parameter(_) => {
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PARAMETER) {
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PARAMETER) {
Some(index) => index,
None => token_type_index,
};
return Ok(false);
}
crate::walk::Node::MemberExpression(member_expression) => {
let sr: SourceRange = member_expression.property.clone().into();
let sr: SourceRange = (&member_expression.property).into();
if sr.contains(source_range.start()) {
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
Some(index) => index,
None => token_type_index,
};
@ -431,24 +408,30 @@ impl Backend {
}
}
crate::walk::Node::ObjectProperty(object_property) => {
let sr: SourceRange = object_property.key.clone().into();
let sr: SourceRange = (&object_property.key).into();
if sr.contains(source_range.start()) {
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
Some(index) => index,
None => token_type_index,
};
}
return get_modifier(SemanticTokenModifier::DECLARATION);
return get_modifier(vec![SemanticTokenModifier::DECLARATION]);
}
crate::walk::Node::CallExpression(call_expr) => {
let sr: SourceRange = call_expr.callee.clone().into();
let sr: SourceRange = (&call_expr.callee).into();
if sr.contains(source_range.start()) {
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
Some(index) => index,
None => token_type_index,
};
if self.stdlib_completions.contains_key(&call_expr.callee.name) {
// This is a stdlib function.
return get_modifier(vec![SemanticTokenModifier::DEFAULT_LIBRARY]);
}
return Ok(false);
}
}
@ -509,15 +492,12 @@ impl Backend {
last_position = position;
}
self.semantic_tokens_map
.insert(params.uri.to_string(), semantic_tokens)
.await;
self.semantic_tokens_map.insert(params.uri.to_string(), semantic_tokens);
}
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
let mut items = match self.diagnostics_map.get(uri.as_str()).await {
Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items.clone(),
_ => vec![],
let Some(mut items) = self.diagnostics_map.get_mut(uri.as_str()) else {
return;
};
// If we only want to clear a specific severity, do that.
@ -527,94 +507,83 @@ impl Backend {
items.clear();
}
self.diagnostics_map
.insert(
uri.to_string(),
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: FullDocumentDiagnosticReport {
result_id: None,
items: items.clone(),
},
}),
)
.await;
if items.is_empty() {
#[cfg(not(target_arch = "wasm32"))]
{
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
}
#[cfg(not(target_arch = "wasm32"))]
{
self.client.publish_diagnostics(uri.clone(), items, None).await;
// We need to drop the items here.
drop(items);
self.diagnostics_map.remove(uri.as_str());
} else {
// We don't need to update the map since we used get_mut.
#[cfg(not(target_arch = "wasm32"))]
{
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
}
}
}
async fn add_to_diagnostics<DiagT: IntoDiagnostic + std::fmt::Debug>(
&self,
params: &TextDocumentItem,
diagnostic: DiagT,
diagnostics: &[DiagT],
clear_all_before_add: bool,
) {
self.client
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostic))
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostics))
.await;
let diagnostic = diagnostic.to_lsp_diagnostic(&params.text);
if clear_all_before_add {
self.clear_diagnostics_map(&params.uri, None).await;
} else if diagnostic.severity == Some(DiagnosticSeverity::ERROR) {
} else if diagnostics.iter().all(|x| x.severity() == DiagnosticSeverity::ERROR) {
// If the diagnostic is an error, it will be the only error we get since that halts
// execution.
// Clear the diagnostics before we add a new one.
self.clear_diagnostics_map(&params.uri, Some(DiagnosticSeverity::ERROR))
.await;
} else if diagnostics
.iter()
.all(|x| x.severity() == DiagnosticSeverity::INFORMATION)
{
// If the diagnostic is a lint, we will pass them all to add at once so we need to
// clear the old ones.
self.clear_diagnostics_map(&params.uri, Some(DiagnosticSeverity::INFORMATION))
.await;
}
let DocumentDiagnosticReport::Full(mut report) = self
.diagnostics_map
.get(params.uri.clone().as_str())
.await
.unwrap_or(DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: FullDocumentDiagnosticReport {
result_id: None,
items: vec![],
},
}))
else {
unreachable!();
let mut items = if let Some(items) = self.diagnostics_map.get(params.uri.as_str()) {
// TODO: Would be awesome to fix the clone here.
items.clone()
} else {
vec![]
};
// Ensure we don't already have this diagnostic.
if report
.full_document_diagnostic_report
.items
.iter()
.any(|x| x == &diagnostic)
{
self.client
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
.await;
return;
for diagnostic in diagnostics {
let d = diagnostic.to_lsp_diagnostic(&params.text);
// Make sure we don't duplicate diagnostics.
if !items.iter().any(|x| x == &d) {
items.push(d);
}
}
report.full_document_diagnostic_report.items.push(diagnostic);
self.diagnostics_map.insert(params.uri.to_string(), items.clone());
self.diagnostics_map
.insert(params.uri.to_string(), DocumentDiagnosticReport::Full(report.clone()))
.await;
self.client
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
.await;
self.client.publish_diagnostics(params.uri.clone(), items, None).await;
}
async fn execute(&self, params: &TextDocumentItem, ast: crate::ast::types::Program) -> Result<()> {
async fn execute(&self, params: &TextDocumentItem, ast: &crate::ast::types::Program) -> Result<()> {
// Check if we can execute.
if !self.can_execute().await {
return Ok(());
}
// Execute the code if we have an executor context.
let Some(executor_ctx) = self.executor_ctx().await else {
let ctx = self.executor_ctx().await;
let Some(ref executor_ctx) = *ctx else {
return Ok(());
};
@ -629,17 +598,16 @@ impl Backend {
let memory = match executor_ctx.run(ast, None).await {
Ok(memory) => memory,
Err(err) => {
self.memory_map.remove(&params.uri.to_string()).await;
self.add_to_diagnostics(params, err, false).await;
self.memory_map.remove(params.uri.as_str());
self.add_to_diagnostics(params, &[err], false).await;
// Since we already published the diagnostics we don't really care about the error
// string.
return Err(anyhow::anyhow!("failed to execute code"));
}
};
drop(executor_ctx);
self.memory_map.insert(params.uri.to_string(), memory.clone()).await;
self.memory_map.insert(params.uri.to_string(), memory.clone());
// Send the notification to the client that the memory was updated.
self.client
@ -649,22 +617,36 @@ impl Backend {
Ok(())
}
pub fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<u32> {
pub fn get_semantic_token_type_index(&self, token_type: &SemanticTokenType) -> Option<u32> {
SEMANTIC_TOKEN_TYPES
.iter()
.position(|x| *x == token_type)
.position(|x| *x == *token_type)
.map(|y| y as u32)
}
pub fn get_semantic_token_modifier_index(&self, token_type: SemanticTokenModifier) -> Option<u32> {
SEMANTIC_TOKEN_MODIFIERS
.iter()
.position(|x| *x == token_type)
.map(|y| y as u32)
pub fn get_semantic_token_modifier_index(&self, token_types: Vec<SemanticTokenModifier>) -> Option<u32> {
if token_types.is_empty() {
return None;
}
let mut modifier = None;
for token_type in token_types {
if let Some(index) = SEMANTIC_TOKEN_MODIFIERS
.iter()
.position(|x| *x == token_type)
.map(|y| y as u32)
{
modifier = match modifier {
Some(modifier) => Some(modifier | index),
None => Some(index),
};
}
}
modifier
}
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
let ast = match self.ast_map.get(file_name).await {
let ast = match self.ast_map.get(file_name) {
Some(ast) => ast,
None => return vec![],
};
@ -681,7 +663,9 @@ impl Backend {
// Collect all the file data we know.
let mut buf = vec![];
let mut zip = zip::ZipWriter::new(std::io::Cursor::new(&mut buf));
for (entry, value) in self.code_map.inner().await.iter() {
for code in self.code_map.iter() {
let entry = code.key();
let value = code.value();
let file_name = entry.replace("file://", "").to_string();
let options = zip::write::SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
@ -717,7 +701,7 @@ impl Backend {
// Get the workspace folders.
// The key of the workspace folder is the project name.
let workspace_folders = self.workspace_folders().await;
let project_names: Vec<String> = workspace_folders.iter().map(|v| v.name.clone()).collect::<Vec<_>>();
let project_names: Vec<&str> = workspace_folders.iter().map(|v| v.name.as_str()).collect::<Vec<_>>();
// Get the first name.
let project_name = project_names
.first()
@ -764,7 +748,9 @@ impl Backend {
let filename = params.text_document.uri.to_string();
{
let Some(mut executor_ctx) = self.executor_ctx().await else {
let mut ctx = self.executor_ctx.write().await;
// Borrow the executor context mutably.
let Some(ref mut executor_ctx) = *ctx else {
self.client
.log_message(MessageType::ERROR, "no executor context set to update units for")
.await;
@ -776,8 +762,8 @@ impl Backend {
.await;
// Try to get the memory for the current code.
let has_memory = if let Some(memory) = self.memory_map.get(&filename).await {
memory != crate::executor::ProgramMemory::default()
let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
*memory != crate::executor::ProgramMemory::default()
} else {
false
};
@ -792,10 +778,6 @@ impl Backend {
// Set the engine units.
executor_ctx.update_units(params.units);
// Update the locked executor context.
self.set_executor_ctx(executor_ctx.clone()).await;
drop(executor_ctx);
}
// Lock is dropped here since nested.
// This is IMPORTANT.
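The nested block above is what releases the executor_ctx write guard before the handler continues; holding a tokio::sync::RwLock write guard across later awaits would block every reader, including executor_ctx(). A small sketch of the scoping pattern with placeholder types (not the crate's real ExecutorContext):

use std::sync::Arc;
use tokio::sync::RwLock;

struct ExecutorContext {
    units: String,
}

async fn update_units(ctx: Arc<RwLock<Option<ExecutorContext>>>, units: &str) {
    {
        // Borrow the context mutably only for as long as the update takes.
        let mut guard = ctx.write().await;
        let Some(ref mut executor_ctx) = *guard else {
            return;
        };
        executor_ctx.units = units.to_string();
        // The write guard is dropped at the end of this block.
    }

    // Readers (e.g. executor_ctx()) can take the lock again while follow-up work runs.
    let guard = ctx.read().await;
    assert_eq!(guard.as_ref().map(|c| c.units.as_str()), Some("in"));
}

#[tokio::main]
async fn main() {
    let ctx = Arc::new(RwLock::new(Some(ExecutorContext { units: "mm".to_string() })));
    update_units(ctx, "in").await;
}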
@ -823,20 +805,13 @@ impl Backend {
&self,
params: custom_notifications::UpdateCanExecuteParams,
) -> RpcResult<custom_notifications::UpdateCanExecuteResponse> {
let can_execute = self.can_execute().await;
let mut can_execute = self.can_execute.write().await;
if can_execute == params.can_execute {
if *can_execute == params.can_execute {
return Ok(custom_notifications::UpdateCanExecuteResponse {});
}
if !params.can_execute {
// Kill any in progress executions.
if let Some(current_handle) = self.current_handle().await {
current_handle.cancel();
}
}
self.set_can_execute(params.can_execute).await;
*can_execute = params.can_execute;
Ok(custom_notifications::UpdateCanExecuteResponse {})
}
@ -949,7 +924,7 @@ impl LanguageServer for Backend {
}
async fn did_change(&self, params: DidChangeTextDocumentParams) {
self.do_did_change(params.clone()).await;
self.do_did_change(params).await;
}
async fn did_save(&self, params: DidSaveTextDocumentParams) {
@ -988,7 +963,7 @@ impl LanguageServer for Backend {
async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
let filename = params.text_document_position_params.text_document.uri.to_string();
let Some(current_code) = self.code_map.get(&filename).await else {
let Some(current_code) = self.code_map.get(&filename) else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -998,7 +973,7 @@ impl LanguageServer for Backend {
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
// Let's iterate over the AST and find the node that contains the cursor.
let Some(ast) = self.ast_map.get(&filename).await else {
let Some(ast) = self.ast_map.get(&filename) else {
return Ok(None);
};
@ -1031,7 +1006,11 @@ impl LanguageServer for Backend {
value: format!(
"```{}{}```\n{}",
name,
label_details.detail.clone().unwrap_or_default(),
if let Some(detail) = &label_details.detail {
detail
} else {
""
},
docs
),
}),
@ -1090,7 +1069,7 @@ impl LanguageServer for Backend {
let filename = params.text_document.uri.to_string();
// Get the current diagnostics for this file.
let Some(diagnostic) = self.diagnostics_map.get(&filename).await else {
let Some(items) = self.diagnostics_map.get(&filename) else {
// Send an empty report.
return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
RelatedFullDocumentDiagnosticReport {
@ -1103,13 +1082,21 @@ impl LanguageServer for Backend {
)));
};
Ok(DocumentDiagnosticReportResult::Report(diagnostic.clone()))
Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: FullDocumentDiagnosticReport {
result_id: None,
items: items.clone(),
},
},
)))
}
async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
let filename = params.text_document_position_params.text_document.uri.to_string();
let Some(current_code) = self.code_map.get(&filename).await else {
let Some(current_code) = self.code_map.get(&filename) else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1119,7 +1106,7 @@ impl LanguageServer for Backend {
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
// Let's iterate over the AST and find the node that contains the cursor.
let Some(ast) = self.ast_map.get(&filename).await else {
let Some(ast) = self.ast_map.get(&filename) else {
return Ok(None);
};
@ -1153,7 +1140,7 @@ impl LanguageServer for Backend {
signature.active_parameter = Some(parameter_index);
Ok(Some(signature.clone()))
Ok(Some(signature))
}
crate::ast::types::Hover::Comment { value: _, range: _ } => {
return Ok(None);
@ -1170,7 +1157,7 @@ impl LanguageServer for Backend {
async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
let filename = params.text_document.uri.to_string();
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename).await else {
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename) else {
return Ok(None);
};
@ -1183,7 +1170,7 @@ impl LanguageServer for Backend {
async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
let filename = params.text_document.uri.to_string();
let Some(symbols) = self.symbols_map.get(&filename).await else {
let Some(symbols) = self.symbols_map.get(&filename) else {
return Ok(None);
};
@ -1193,7 +1180,7 @@ impl LanguageServer for Backend {
async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
let filename = params.text_document.uri.to_string();
let Some(current_code) = self.code_map.get(&filename).await else {
let Some(current_code) = self.code_map.get(&filename) else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1230,7 +1217,7 @@ impl LanguageServer for Backend {
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
let filename = params.text_document_position.text_document.uri.to_string();
let Some(current_code) = self.code_map.get(&filename).await else {
let Some(current_code) = self.code_map.get(&filename) else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -1273,7 +1260,7 @@ impl LanguageServer for Backend {
let filename = params.text_document.uri.to_string();
// Get the ast.
let Some(ast) = self.ast_map.get(&filename).await else {
let Some(ast) = self.ast_map.get(&filename) else {
return Ok(None);
};
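With diagnostics cached as a plain Vec<Diagnostic> per file, the pull-diagnostics handler above rebuilds the DocumentDiagnosticReport on demand rather than storing whole reports. A condensed sketch of that assembly, assuming the same tower_lsp types used throughout this diff:

use dashmap::DashMap;
use tower_lsp::lsp_types::{
    Diagnostic, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
    FullDocumentDiagnosticReport, RelatedFullDocumentDiagnosticReport,
};

// Build the pull-diagnostics response for one file from the cached items.
fn diagnostics_report(
    diagnostics_map: &DashMap<String, Vec<Diagnostic>>,
    filename: &str,
) -> DocumentDiagnosticReportResult {
    let items = diagnostics_map
        .get(filename)
        .map(|items| items.value().clone())
        .unwrap_or_default();
    DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
        RelatedFullDocumentDiagnosticReport {
            related_documents: None,
            full_document_diagnostic_report: FullDocumentDiagnosticReport {
                result_id: None,
                items,
            },
        },
    ))
}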

@ -3,7 +3,8 @@
pub mod backend;
pub mod copilot;
pub mod kcl;
mod safemap;
#[cfg(any(test, feature = "lsp-test-util"))]
pub mod test_util;
#[cfg(test)]
mod tests;
pub mod util;

@ -1,60 +0,0 @@
//! A map type that is safe to use in a concurrent environment.
//! But also in wasm.
//! Previously, we used `dashmap::DashMap` for this purpose, but it doesn't work in wasm.
use std::{borrow::Borrow, collections::HashMap, hash::Hash, sync::Arc};
use tokio::sync::RwLock;
/// A thread-safe map type.
#[derive(Clone, Debug)]
pub struct SafeMap<K: Eq + Hash + Clone, V: Clone>(Arc<RwLock<HashMap<K, V>>>);
impl<K: Eq + Hash + Clone, V: Clone> SafeMap<K, V> {
/// Create a new empty map.
pub fn new() -> Self {
SafeMap(Arc::new(RwLock::new(HashMap::new())))
}
pub async fn len(&self) -> usize {
self.0.read().await.len()
}
pub async fn is_empty(&self) -> bool {
self.0.read().await.is_empty()
}
pub async fn clear(&self) {
self.0.write().await.clear();
}
/// Insert a key-value pair into the map.
pub async fn insert(&self, key: K, value: V) {
self.0.write().await.insert(key, value);
}
/// Get a reference to the value associated with the given key.
pub async fn get<Q>(&self, key: &Q) -> Option<V>
where
K: Borrow<Q>,
Q: Hash + Eq + ?Sized,
{
self.0.read().await.get(key).cloned()
}
/// Remove the key-value pair associated with the given key.
pub async fn remove(&self, key: &K) -> Option<V> {
self.0.write().await.remove(key)
}
/// Get a reference to the underlying map.
pub async fn inner(&self) -> HashMap<K, V> {
self.0.read().await.clone()
}
}
impl<K: Eq + Hash + Clone, V: Clone> Default for SafeMap<K, V> {
fn default() -> Self {
SafeMap::new()
}
}
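Dropping SafeMap changes the read pattern everywhere: SafeMap::get cloned the value from behind an async lock, while DashMap::get returns a shard guard that is dereferenced in place and must go out of scope before the same key is mutated or removed. A small illustration of the new pattern (assuming DashMap is now acceptable on every target this crate builds for):

use dashmap::DashMap;

fn main() {
    let code_map: DashMap<String, Vec<u8>> = DashMap::new();
    code_map.insert("file:///main.kcl".to_string(), b"const x = 1".to_vec());

    // Reads return a guard; dereference it instead of cloning the value.
    if let Some(code) = code_map.get("file:///main.kcl") {
        assert_eq!(*code, b"const x = 1".to_vec());
    } // the guard is dropped here, so the entry can be mutated or removed below

    // Removal yields the (key, value) pair, mirroring remove(...).map(|x| x.1).
    let removed = code_map.remove("file:///main.kcl").map(|x| x.1);
    assert_eq!(removed, Some(b"const x = 1".to_vec()));
}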

@ -0,0 +1,112 @@
use std::sync::{Arc, RwLock};
use anyhow::Result;
use tower_lsp::LanguageServer;
fn new_zoo_client() -> kittycad::Client {
let user_agent = concat!(env!("CARGO_PKG_NAME"), ".rs/", env!("CARGO_PKG_VERSION"),);
let http_client = reqwest::Client::builder()
.user_agent(user_agent)
// For file conversions we need this to be long.
.timeout(std::time::Duration::from_secs(600))
.connect_timeout(std::time::Duration::from_secs(60));
let ws_client = reqwest::Client::builder()
.user_agent(user_agent)
// For file conversions we need this to be long.
.timeout(std::time::Duration::from_secs(600))
.connect_timeout(std::time::Duration::from_secs(60))
.connection_verbose(true)
.tcp_keepalive(std::time::Duration::from_secs(600))
.http1_only();
let token = std::env::var("KITTYCAD_API_TOKEN").expect("KITTYCAD_API_TOKEN not set");
// Create the client.
let mut client = kittycad::Client::new_from_reqwest(token, http_client, ws_client);
// Set a local engine address if it's set.
if let Ok(addr) = std::env::var("LOCAL_ENGINE_ADDR") {
client.set_base_url(addr);
}
client
}
// Create a fake kcl lsp server for testing.
pub async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
let stdlib = crate::std::StdLib::new();
let stdlib_completions = crate::lsp::kcl::get_completions_from_stdlib(&stdlib)?;
let stdlib_signatures = crate::lsp::kcl::get_signatures_from_stdlib(&stdlib)?;
let zoo_client = new_zoo_client();
let executor_ctx = if execute {
Some(crate::executor::ExecutorContext::new(&zoo_client, Default::default()).await?)
} else {
None
};
let can_execute = executor_ctx.is_some();
// Create the backend.
let (service, _) = tower_lsp::LspService::build(|client| crate::lsp::kcl::Backend {
client,
fs: Arc::new(crate::fs::FileManager::new()),
workspace_folders: Default::default(),
stdlib_completions,
stdlib_signatures,
token_map: Default::default(),
ast_map: Default::default(),
memory_map: Default::default(),
code_map: Default::default(),
diagnostics_map: Default::default(),
symbols_map: Default::default(),
semantic_tokens_map: Default::default(),
zoo_client,
can_send_telemetry: true,
executor_ctx: Arc::new(tokio::sync::RwLock::new(executor_ctx)),
can_execute: Arc::new(tokio::sync::RwLock::new(can_execute)),
is_initialized: Default::default(),
})
.custom_method("kcl/updateUnits", crate::lsp::kcl::Backend::update_units)
.custom_method("kcl/updateCanExecute", crate::lsp::kcl::Backend::update_can_execute)
.finish();
let server = service.inner();
server
.initialize(tower_lsp::lsp_types::InitializeParams::default())
.await?;
server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
Ok(server.clone())
}
// Create a fake copilot lsp server for testing.
pub async fn copilot_lsp_server() -> Result<crate::lsp::copilot::Backend> {
// We don't actually need to authenticate to the backend for this test.
let zoo_client = kittycad::Client::new_from_env();
// Create the backend.
let (service, _) = tower_lsp::LspService::new(|client| crate::lsp::copilot::Backend {
client,
fs: Arc::new(crate::fs::FileManager::new()),
workspace_folders: Default::default(),
code_map: Default::default(),
zoo_client,
editor_info: Arc::new(RwLock::new(crate::lsp::copilot::types::CopilotEditorInfo::default())),
cache: Arc::new(crate::lsp::copilot::cache::CopilotCache::new()),
telemetry: Default::default(),
is_initialized: Default::default(),
diagnostics_map: Default::default(),
});
let server = service.inner();
server
.initialize(tower_lsp::lsp_types::InitializeParams::default())
.await?;
server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
Ok(server.clone())
}
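The helpers above are only compiled for tests or the lsp-test-util feature; a typical caller spins up the fake server and drives it through the LanguageServer trait. A hedged sketch of such a test (it assumes the tokio test macro is available and, for kcl_lsp_server(true), a valid KITTYCAD_API_TOKEN):

#[cfg(test)]
mod lsp_example {
    use anyhow::Result;
    use tower_lsp::lsp_types::{DidOpenTextDocumentParams, TextDocumentItem, Url};
    use tower_lsp::LanguageServer;

    #[tokio::test]
    async fn opens_a_document_against_the_fake_server() -> Result<()> {
        // Build the backend without an executor context (no engine connection needed).
        let server = crate::lsp::test_util::kcl_lsp_server(false).await?;

        server
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri: Url::parse("file:///main.kcl")?,
                    language_id: "kcl".to_string(),
                    version: 1,
                    text: "const part001 = startSketchOn('XY')".to_string(),
                },
            })
            .await;
        Ok(())
    }
}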

File diff suppressed because it is too large

@ -37,4 +37,7 @@ pub fn get_line_before(pos: Position, rope: &Rope) -> Option<String> {
pub trait IntoDiagnostic {
/// Convert the traited object to a [lsp_types::Diagnostic].
fn to_lsp_diagnostic(&self, text: &str) -> Diagnostic;
/// Get the severity of the diagnostic.
fn severity(&self) -> tower_lsp::lsp_types::DiagnosticSeverity;
}
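The new severity() method is what lets add_to_diagnostics tell hard errors apart from lint findings before deciding which cached diagnostics to clear. A sketch of an implementor using a made-up LintNote type; the trait is re-declared here only so the snippet stands alone (the real implementations live on the crate's error and lint types):

use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};

// Hypothetical finding type; stands in for the crate's real error/lint types.
struct LintNote {
    message: String,
}

trait IntoDiagnostic {
    fn to_lsp_diagnostic(&self, text: &str) -> Diagnostic;
    fn severity(&self) -> DiagnosticSeverity;
}

impl IntoDiagnostic for LintNote {
    fn to_lsp_diagnostic(&self, _text: &str) -> Diagnostic {
        Diagnostic {
            range: Range::new(Position::new(0, 0), Position::new(0, 1)),
            severity: Some(self.severity()),
            message: self.message.clone(),
            ..Default::default()
        }
    }

    fn severity(&self) -> DiagnosticSeverity {
        // Lints surface as INFORMATION; hard errors would return ERROR.
        DiagnosticSeverity::INFORMATION
    }
}

fn main() {
    let note = LintNote { message: "consider adding a tag".to_string() };
    let d = note.to_lsp_diagnostic("");
    assert_eq!(d.severity, Some(DiagnosticSeverity::INFORMATION));
}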

@ -34,7 +34,6 @@ fn evaluate(rpn: Vec<BinaryExpressionToken>) -> Result<BinaryExpression, KclErro
operator,
left,
right,
digest: None,
}))
}
BinaryExpressionToken::Operand(o) => o,
@ -130,7 +129,6 @@ mod tests {
end: 0,
value: n.into(),
raw: n.to_string(),
digest: None,
}))
}
let tests: Vec<Vec<BinaryExpressionToken>> = vec![
@ -148,7 +146,6 @@ mod tests {
operator: BinaryOperator::Sub,
left: lit(1),
right: lit(5),
digest: None,
}))
.into(),
BinaryOperator::Pow.into(),

@ -87,7 +87,6 @@ fn non_code_node(i: TokenSlice) -> PResult<NonCodeNode> {
} else {
NonCodeValue::BlockComment { value, style }
},
digest: None,
}),
_ => None,
})
@ -125,7 +124,6 @@ fn non_code_node_no_leading_whitespace(i: TokenSlice) -> PResult<NonCodeNode> {
start: token.start,
end: token.end,
value,
digest: None,
})
}
})
@ -195,7 +193,6 @@ fn pipe_expression(i: TokenSlice) -> PResult<PipeExpression> {
end: values.last().unwrap().end().max(max_noncode_end),
body: values,
non_code_meta,
digest: None,
})
}
@ -216,7 +213,6 @@ fn bool_value(i: TokenSlice) -> PResult<Literal> {
end: token.end,
value: LiteralValue::Bool(value),
raw: value.to_string(),
digest: None,
})
}
@ -246,7 +242,6 @@ pub fn string_literal(i: TokenSlice) -> PResult<Literal> {
end: token.end,
value,
raw: token.value.clone(),
digest: None,
})
}
@ -279,7 +274,6 @@ pub(crate) fn unsigned_number_literal(i: TokenSlice) -> PResult<Literal> {
end: token.end,
value,
raw: token.value.clone(),
digest: None,
})
}
@ -437,7 +431,6 @@ fn shebang(i: TokenSlice) -> PResult<NonCodeNode> {
value: NonCodeValue::Shebang {
value: format!("#!{}", value),
},
digest: None,
})
}
@ -459,12 +452,7 @@ fn array(i: TokenSlice) -> PResult<ArrayExpression> {
.parse_next(i)?;
ignore_whitespace(i);
let end = close_bracket(i)?.end;
Ok(ArrayExpression {
start,
end,
elements,
digest: None,
})
Ok(ArrayExpression { start, end, elements })
}
/// Parse n..m into a vec of numbers [n, n+1, ..., m]
@ -480,18 +468,13 @@ fn integer_range(i: TokenSlice) -> PResult<Vec<Value>> {
end: token0.end,
value: num.into(),
raw: num.to_string(),
digest: None,
}))
})
.collect())
}
fn object_property(i: TokenSlice) -> PResult<ObjectProperty> {
let key = identifier
.context(expected(
"the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key",
))
.parse_next(i)?;
let key = identifier.context(expected("the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key")).parse_next(i)?;
colon
.context(expected(
"a colon, which separates the property's key from the value you're setting it to, e.g. 'height: 4'",
@ -508,7 +491,6 @@ fn object_property(i: TokenSlice) -> PResult<ObjectProperty> {
end: val.end(),
key,
value: val,
digest: None,
})
}
@ -524,12 +506,7 @@ fn object(i: TokenSlice) -> PResult<ObjectExpression> {
ignore_trailing_comma(i);
ignore_whitespace(i);
let end = close_brace(i)?.end;
Ok(ObjectExpression {
start,
end,
properties,
digest: None,
})
Ok(ObjectExpression { start, end, properties })
}
/// Parse the % symbol, used to substitute a curried argument from a |> (pipe).
@ -539,7 +516,6 @@ fn pipe_sub(i: TokenSlice) -> PResult<PipeSubstitution> {
Ok(PipeSubstitution {
start: token.start,
end: token.end,
digest: None,
})
} else {
Err(KclError::Syntax(KclErrorDetails {
@ -579,7 +555,6 @@ fn function_expression(i: TokenSlice) -> PResult<FunctionExpression> {
params,
body,
return_type,
digest: None,
})
}
@ -609,12 +584,9 @@ fn member_expression_subscript(i: TokenSlice) -> PResult<(LiteralIdentifier, usi
fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
// This is an identifier, followed by a sequence of members (aka properties)
// First, the identifier.
let id = identifier
.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier"))
.parse_next(i)?;
let id = identifier.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier")).parse_next(i)?;
// Now a sequence of members.
let member = alt((member_expression_dot, member_expression_subscript))
.context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
let member = alt((member_expression_dot, member_expression_subscript)).context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
let mut members: Vec<_> = repeat(1.., member)
.context(expected("a sequence of at least one members/properties"))
.parse_next(i)?;
@ -630,7 +602,6 @@ fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
object: MemberObject::Identifier(Box::new(id)),
computed,
property,
digest: None,
};
// Each remaining member wraps the current member expression inside another member expression.
@ -645,7 +616,6 @@ fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
object: MemberObject::MemberExpression(Box::new(accumulated)),
computed,
property,
digest: None,
}
}))
}
@ -792,7 +762,6 @@ pub fn function_body(i: TokenSlice) -> PResult<Program> {
start: ws_token.start,
end: ws_token.end,
value: NonCodeValue::NewLine,
digest: None,
}));
}
}
@ -874,7 +843,6 @@ pub fn function_body(i: TokenSlice) -> PResult<Program> {
end,
body,
non_code_meta,
digest: None,
})
}
@ -901,7 +869,6 @@ pub fn return_stmt(i: TokenSlice) -> PResult<ReturnStatement> {
start,
end: argument.end(),
argument,
digest: None,
})
}
@ -1039,10 +1006,8 @@ fn declaration(i: TokenSlice) -> PResult<VariableDeclaration> {
end,
id,
init: val,
digest: None,
}],
kind,
digest: None,
})
}
@ -1055,7 +1020,6 @@ impl TryFrom<Token> for Identifier {
start: token.start,
end: token.end,
name: token.value,
digest: None,
})
} else {
Err(KclError::Syntax(KclErrorDetails {
@ -1086,7 +1050,6 @@ impl TryFrom<Token> for TagDeclarator {
start: token.start - 1,
end: token.end,
name: token.value,
digest: None,
})
} else {
Err(KclError::Syntax(KclErrorDetails {
@ -1141,19 +1104,9 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
// TODO: negation. Original parser doesn't support `not` yet.
TokenType::Operator => Err(KclError::Syntax(KclErrorDetails {
source_ranges: token.as_source_ranges(),
message: format!(
"{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)",
token.value.as_str(),
),
})),
other => Err(KclError::Syntax(KclErrorDetails {
source_ranges: token.as_source_ranges(),
message: format!(
"{EXPECTED} but found {} which is {}",
token.value.as_str(),
other,
),
message: format!("{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)", token.value.as_str(),),
})),
other => Err(KclError::Syntax(KclErrorDetails { source_ranges: token.as_source_ranges(), message: format!("{EXPECTED} but found {} which is {}", token.value.as_str(), other,) })),
})
.context(expected("a unary expression, e.g. -x or -3"))
.parse_next(i)?;
@ -1163,7 +1116,6 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
end: argument.end(),
operator,
argument,
digest: None,
})
}
@ -1241,7 +1193,6 @@ fn expression(i: TokenSlice) -> PResult<ExpressionStatement> {
start: val.start(),
end: val.end(),
expression: val,
digest: None,
})
}
@ -1459,7 +1410,6 @@ fn parameters(i: TokenSlice) -> PResult<Vec<Parameter>> {
identifier,
type_,
optional,
digest: None,
})
})
.collect::<Result<_, _>>()
@ -1549,7 +1499,6 @@ fn fn_call(i: TokenSlice) -> PResult<CallExpression> {
start: literal.start,
end: literal.end,
name: name.to_string(),
digest: None,
};
let tag = tag
.into_valid_binding_name()
@ -1588,7 +1537,6 @@ fn fn_call(i: TokenSlice) -> PResult<CallExpression> {
start: literal.start,
end: literal.end,
name: name.to_string(),
digest: None,
};
// Replace the literal with the tag.
@ -1616,7 +1564,6 @@ fn fn_call(i: TokenSlice) -> PResult<CallExpression> {
callee: fn_name,
arguments: args,
optional: false,
digest: None,
})
}
@ -1727,7 +1674,7 @@ const mySk1 = startSketchAt([0, 0])"#;
start0.value,
NonCodeValue::BlockComment {
value: "comment at start".to_owned(),
style: CommentStyle::Block,
style: CommentStyle::Block
}
);
assert_eq!(start1.value, NonCodeValue::NewLine);
@ -1792,25 +1739,19 @@ const mySk1 = startSketchAt([0, 0])"#;
start: 32,
end: 33,
value: 2u32.into(),
raw: "2".to_owned(),
digest: None,
})),
digest: None,
raw: "2".to_owned()
}))
})],
non_code_meta: NonCodeMeta {
non_code_nodes: Default::default(),
start: vec![NonCodeNode {
start: 7,
end: 25,
value: NonCodeValue::NewLine,
digest: None
}],
digest: None,
value: NonCodeValue::NewLine
}]
},
digest: None,
},
return_type: None,
digest: None,
}
);
}
@ -1832,7 +1773,7 @@ const mySk1 = startSketchAt([0, 0])"#;
non_code_meta.non_code_nodes.get(&2).unwrap()[0].value,
NonCodeValue::InlineComment {
value: "inline-comment".to_owned(),
style: CommentStyle::Line,
style: CommentStyle::Line
}
);
assert_eq!(body.len(), 4);
@ -1857,9 +1798,8 @@ const mySk1 = startSketchAt([0, 0])"#;
end: 20,
value: NonCodeValue::BlockComment {
value: "this is a comment".to_owned(),
style: CommentStyle::Line,
},
digest: None,
style: CommentStyle::Line
}
}],
non_code_meta.start,
);
@ -1870,15 +1810,13 @@ const mySk1 = startSketchAt([0, 0])"#;
end: 82,
value: NonCodeValue::InlineComment {
value: "block\n comment".to_owned(),
style: CommentStyle::Block,
},
digest: None,
style: CommentStyle::Block
}
},
NonCodeNode {
start: 82,
end: 86,
value: NonCodeValue::NewLine,
digest: None,
value: NonCodeValue::NewLine
},
]),
non_code_meta.non_code_nodes.get(&0),
@ -1889,9 +1827,8 @@ const mySk1 = startSketchAt([0, 0])"#;
end: 129,
value: NonCodeValue::BlockComment {
value: "this is also a comment".to_owned(),
style: CommentStyle::Line,
},
digest: None,
style: CommentStyle::Line
}
}]),
non_code_meta.non_code_nodes.get(&1),
);
@ -1910,7 +1847,7 @@ const mySk1 = startSketchAt([0, 0])"#;
actual.non_code_meta.non_code_nodes.get(&0).unwrap()[0].value,
NonCodeValue::InlineComment {
value: "block\n comment".to_owned(),
style: CommentStyle::Block,
style: CommentStyle::Block
}
);
}
@ -1958,8 +1895,7 @@ const mySk1 = startSketchAt([0, 0])"#;
start: 9,
end: 10,
value: 3u32.into(),
raw: "3".to_owned(),
digest: None,
raw: "3".to_owned()
}))
);
}
@ -2093,7 +2029,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hi".to_owned(),
style: CommentStyle::Line,
},
digest: None,
},
),
(
@ -2105,7 +2040,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hello".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
(
@ -2117,7 +2051,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hello".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
(
@ -2129,7 +2062,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hello".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
(
@ -2142,7 +2074,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hello".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
(
@ -2157,7 +2088,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hello".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
(
@ -2172,7 +2102,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "hello".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
(
@ -2185,7 +2114,6 @@ const mySk1 = startSketchAt([0, 0])"#;
value: "block\n comment".to_owned(),
style: CommentStyle::Block,
},
digest: None,
},
),
]
@ -2329,22 +2257,18 @@ const mySk1 = startSketchAt([0, 0])"#;
end: 1,
value: 5u32.into(),
raw: "5".to_owned(),
digest: None,
})),
right: BinaryPart::Literal(Box::new(Literal {
start: 4,
end: 7,
value: "a".into(),
raw: r#""a""#.to_owned(),
digest: None,
})),
digest: None,
};
let expected = vec![BodyItem::ExpressionStatement(ExpressionStatement {
start: 0,
end: 7,
expression: Value::BinaryExpression(Box::new(expr)),
digest: None,
})];
assert_eq!(expected, actual);
}
@ -2446,7 +2370,6 @@ const mySk1 = startSketchAt([0, 0])"#;
end: 1,
value: 5u32.into(),
raw: "5".to_string(),
digest: None,
})),
operator: BinaryOperator::Add,
right: BinaryPart::Literal(Box::new(Literal {
@ -2454,14 +2377,10 @@ const mySk1 = startSketchAt([0, 0])"#;
end: 4,
value: 6u32.into(),
raw: "6".to_string(),
digest: None,
})),
digest: None,
})),
digest: None,
})],
non_code_meta: NonCodeMeta::default(),
digest: None,
};
assert_eq!(result, expected_result);
@ -2730,11 +2649,9 @@ e
start: 0,
end: 0,
name: "a".to_owned(),
digest: None,
},
type_: None,
optional: true,
digest: None,
}],
true,
),
@ -2744,11 +2661,9 @@ e
start: 0,
end: 0,
name: "a".to_owned(),
digest: None,
},
type_: None,
optional: false,
digest: None,
}],
true,
),
@ -2759,22 +2674,18 @@ e
start: 0,
end: 0,
name: "a".to_owned(),
digest: None,
},
type_: None,
optional: false,
digest: None,
},
Parameter {
identifier: Identifier {
start: 0,
end: 0,
name: "b".to_owned(),
digest: None,
},
type_: None,
optional: true,
digest: None,
},
],
true,
@ -2786,22 +2697,18 @@ e
start: 0,
end: 0,
name: "a".to_owned(),
digest: None,
},
type_: None,
optional: true,
digest: None,
},
Parameter {
identifier: Identifier {
start: 0,
end: 0,
name: "b".to_owned(),
digest: None,
},
type_: None,
optional: false,
digest: None,
},
],
false,
@ -2833,7 +2740,6 @@ e
start: 6,
end: 13,
name: "myArray".to_string(),
digest: None,
},
init: Value::ArrayExpression(Box::new(ArrayExpression {
start: 16,
@ -2844,88 +2750,73 @@ e
end: 18,
value: 0u32.into(),
raw: "0".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 1u32.into(),
raw: "1".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 2u32.into(),
raw: "2".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 3u32.into(),
raw: "3".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 4u32.into(),
raw: "4".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 5u32.into(),
raw: "5".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 6u32.into(),
raw: "6".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 7u32.into(),
raw: "7".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 8u32.into(),
raw: "8".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 9u32.into(),
raw: "9".to_string(),
digest: None,
})),
Value::Literal(Box::new(Literal {
start: 17,
end: 18,
value: 10u32.into(),
raw: "10".to_string(),
digest: None,
})),
],
digest: None,
})),
digest: None,
}],
kind: VariableKind::Const,
digest: None,
})],
non_code_meta: NonCodeMeta::default(),
digest: None,
};
assert_eq!(result, expected_result);

@ -567,7 +567,7 @@ mod tests {
project_name: Some("assembly".to_string()),
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects/assembly".to_string(),
current_file_name: None,
current_file_path: None,
current_file_path: None
}
);
}
@ -586,7 +586,7 @@ mod tests {
project_name: None,
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects".to_string(),
current_file_name: None,
current_file_path: None,
current_file_path: None
}
);
}
@ -624,7 +624,7 @@ mod tests {
project_name: Some("modeling-app".to_string()),
project_path: "/Users/macinatormax/kittycad/modeling-app".to_string(),
current_file_name: None,
current_file_path: None,
current_file_path: None
}
);
}
@ -642,7 +642,7 @@ mod tests {
project_name: Some("browser".to_string()),
project_path: "/browser".to_string(),
current_file_name: Some("main.kcl".to_string()),
current_file_path: Some("/browser/main.kcl".to_string()),
current_file_path: Some("/browser/main.kcl".to_string())
}
);
}
@ -660,7 +660,7 @@ mod tests {
project_name: Some("browser".to_string()),
project_path: "/browser".to_string(),
current_file_name: None,
current_file_path: None,
current_file_path: None
}
);
}
@ -1046,13 +1046,7 @@ const model = import("model.obj")"#
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.toml")).await;
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
format!(
"File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
tmp_project_dir.join("settings.toml").display()
)
);
assert_eq!(result.unwrap_err().to_string(), format!("File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.toml").display()));
std::fs::remove_dir_all(tmp_project_dir).unwrap();
}
@ -1067,13 +1061,7 @@ const model = import("model.obj")"#
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.docx")).await;
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
format!(
"File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
tmp_project_dir.join("settings.docx").display()
)
);
assert_eq!(result.unwrap_err().to_string(), format!("File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.docx").display()));
std::fs::remove_dir_all(tmp_project_dir).unwrap();
}

@ -640,7 +640,7 @@ textWrapping = true
app: AppSettings {
appearance: AppearanceSettings {
theme: AppTheme::Dark,
color: Default::default(),
color: Default::default()
},
onboarding_status: OnboardingStatus::Dismissed,
project_directory: None,
@ -654,15 +654,15 @@ textWrapping = true
mouse_controls: Default::default(),
highlight_edges: Default::default(),
show_debug_panel: true,
enable_ssao: false.into(),
enable_ssao: false.into()
},
text_editor: TextEditorSettings {
text_wrapping: true.into(),
blinking_cursor: true.into(),
blinking_cursor: true.into()
},
project: Default::default(),
command_bar: CommandBarSettings {
include_settings: true.into(),
include_settings: true.into()
},
}
}
@ -698,7 +698,7 @@ includeSettings = false
app: AppSettings {
appearance: AppearanceSettings {
theme: AppTheme::Dark,
color: 138.0.into(),
color: 138.0.into()
},
onboarding_status: Default::default(),
project_directory: None,
@ -712,15 +712,15 @@ includeSettings = false
mouse_controls: Default::default(),
highlight_edges: Default::default(),
show_debug_panel: true,
enable_ssao: true.into(),
enable_ssao: true.into()
},
text_editor: TextEditorSettings {
text_wrapping: false.into(),
blinking_cursor: false.into(),
blinking_cursor: false.into()
},
project: Default::default(),
command_bar: CommandBarSettings {
include_settings: false.into(),
include_settings: false.into()
},
}
}
@ -761,7 +761,7 @@ defaultProjectName = "projects-$nnn"
app: AppSettings {
appearance: AppearanceSettings {
theme: AppTheme::Dark,
color: 138.0.into(),
color: 138.0.into()
},
onboarding_status: OnboardingStatus::Dismissed,
project_directory: None,
@ -775,18 +775,18 @@ defaultProjectName = "projects-$nnn"
mouse_controls: Default::default(),
highlight_edges: Default::default(),
show_debug_panel: true,
enable_ssao: true.into(),
enable_ssao: true.into()
},
text_editor: TextEditorSettings {
text_wrapping: false.into(),
blinking_cursor: false.into(),
blinking_cursor: false.into()
},
project: ProjectSettings {
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
default_project_name: "projects-$nnn".to_string().into(),
default_project_name: "projects-$nnn".to_string().into()
},
command_bar: CommandBarSettings {
include_settings: false.into(),
include_settings: false.into()
},
}
}
@ -836,7 +836,7 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
app: AppSettings {
appearance: AppearanceSettings {
theme: AppTheme::System,
color: Default::default(),
color: Default::default()
},
onboarding_status: OnboardingStatus::Dismissed,
project_directory: None,
@ -850,15 +850,15 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
mouse_controls: Default::default(),
highlight_edges: true.into(),
show_debug_panel: false,
enable_ssao: true.into(),
enable_ssao: true.into()
},
text_editor: TextEditorSettings {
text_wrapping: true.into(),
blinking_cursor: true.into(),
blinking_cursor: true.into()
},
project: ProjectSettings {
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
default_project_name: "project-$nnn".to_string().into(),
default_project_name: "project-$nnn".to_string().into()
},
command_bar: CommandBarSettings {
include_settings: true.into()

@ -115,7 +115,7 @@ includeSettings = false
app: AppSettings {
appearance: AppearanceSettings {
theme: AppTheme::Dark,
color: 138.0.into(),
color: 138.0.into()
},
onboarding_status: Default::default(),
project_directory: None,
@ -129,14 +129,14 @@ includeSettings = false
mouse_controls: Default::default(),
highlight_edges: Default::default(),
show_debug_panel: true,
enable_ssao: true.into(),
enable_ssao: true.into()
},
text_editor: TextEditorSettings {
text_wrapping: false.into(),
blinking_cursor: false.into(),
blinking_cursor: false.into()
},
command_bar: CommandBarSettings {
include_settings: false.into(),
include_settings: false.into()
},
}
}

@ -85,9 +85,9 @@ async fn inner_chamfer(
// error to the user that they can only tag one edge at a time.
if tag.is_some() && data.tags.len() > 1 {
return Err(KclError::Type(KclErrorDetails {
message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
source_ranges: vec![args.source_range],
}));
message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
source_ranges: vec![args.source_range],
}));
}
let mut fillet_or_chamfers = Vec::new();

@ -314,10 +314,7 @@ fn get_import_format_from_extension(ext: &str) -> Result<kittycad::types::InputF
} else if ext == "glb" {
kittycad::types::FileImportFormat::Gltf
} else {
anyhow::bail!(
"unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.",
ext
)
anyhow::bail!("unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.", ext)
}
}
};

@ -28,7 +28,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{
ast::types::{parse_json_number_as_f64, TagDeclarator},
ast::types::{parse_json_number_as_f64, FunctionExpression, TagDeclarator},
docs::StdLibFn,
errors::{KclError, KclErrorDetails},
executor::{
@ -85,6 +85,7 @@ lazy_static! {
Box::new(crate::std::patterns::PatternLinear3D),
Box::new(crate::std::patterns::PatternCircular2D),
Box::new(crate::std::patterns::PatternCircular3D),
Box::new(crate::std::patterns::PatternTransform),
Box::new(crate::std::chamfer::Chamfer),
Box::new(crate::std::fillet::Fillet),
Box::new(crate::std::fillet::GetOppositeEdge),
@ -351,6 +352,39 @@ impl Args {
Ok(numbers)
}
fn get_pattern_transform_args(&self) -> Result<(u32, FnAsArg<'_>, ExtrudeGroupSet), KclError> {
let sr = vec![self.source_range];
let mut args = self.args.iter();
let num_repetitions = args.next().ok_or_else(|| {
KclError::Type(KclErrorDetails {
message: "Missing first argument (should be the number of repetitions)".to_owned(),
source_ranges: sr.clone(),
})
})?;
let num_repetitions = num_repetitions.get_u32(sr.clone())?;
let transform = args.next().ok_or_else(|| {
KclError::Type(KclErrorDetails {
message: "Missing second argument (should be the transform function)".to_owned(),
source_ranges: sr.clone(),
})
})?;
let func = transform.get_function(sr.clone())?;
let eg = args.next().ok_or_else(|| {
KclError::Type(KclErrorDetails {
message: "Missing third argument (should be a Sketch/ExtrudeGroup or an array of Sketch/ExtrudeGroups)"
.to_owned(),
source_ranges: sr.clone(),
})
})?;
let eg = eg.get_extrude_group_set().map_err(|_e| {
KclError::Type(KclErrorDetails {
message: "Third argument was not an ExtrudeGroup".to_owned(),
source_ranges: sr.clone(),
})
})?;
Ok((num_repetitions, func, eg))
}
fn get_hypotenuse_leg(&self) -> Result<(f64, f64), KclError> {
let numbers = self.get_number_array()?;
@@ -1242,6 +1276,11 @@ pub enum Primitive {
Uuid,
}
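/// A KCL function passed as an argument to a stdlib call: the in-memory function
/// plus the AST expression it was declared with.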
pub struct FnAsArg<'a> {
pub func: &'a crate::executor::MemoryFunction,
pub expr: Box<FunctionExpression>,
}
#[cfg(test)]
mod tests {
use base64::Engine;

View File

@@ -8,7 +8,11 @@ use serde::{Deserialize, Serialize};
use crate::{
errors::{KclError, KclErrorDetails},
executor::{ExtrudeGroup, ExtrudeGroupSet, Geometries, Geometry, MemoryItem, SketchGroup, SketchGroupSet},
executor::{
ExtrudeGroup, ExtrudeGroupSet, Geometries, Geometry, MemoryItem, Point3d, ProgramReturn, SketchGroup,
SketchGroupSet, SourceRange, UserVal,
},
function_param::FunctionParam,
std::{types::Uint, Args},
};
@@ -70,6 +74,233 @@ impl LinearPattern {
}
}
/// A linear pattern
/// Each element in the pattern repeats a particular piece of geometry.
/// The repetitions can be transformed by the `transform` parameter.
pub async fn pattern_transform(args: Args) -> Result<MemoryItem, KclError> {
let (num_repetitions, transform, extr) = args.get_pattern_transform_args()?;
let extrude_groups = inner_pattern_transform(
num_repetitions,
FunctionParam {
inner: transform.func,
fn_expr: transform.expr,
meta: vec![args.source_range.into()],
ctx: args.ctx.clone(),
memory: args.current_program_memory.clone(),
},
extr,
&args,
)
.await?;
Ok(MemoryItem::ExtrudeGroups { value: extrude_groups })
}
/// A linear pattern on a 3D solid.
/// Each repetition of the pattern can be transformed (e.g. scaled, translated, hidden, etc).
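/// The transform function receives the replica index and returns an object whose optional
/// `translate`, `scale`, and `replicate` fields default to [0, 0, 0], [1, 1, 1], and true respectively.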
///
/// ```no_run
/// // Parameters
/// const r = 50 // base radius
/// const h = 10 // layer height
/// const t = 0.005 // taper factor [0-1)
/// // Defines how to modify each layer of the vase.
/// // Each replica is shifted up the Z axis, and has a smoothly-varying radius
/// fn transform = (replicaId) => {
/// let scale = r * abs(1 - (t * replicaId)) * (5 + cos(replicaId / 8))
/// return {
/// translate: [0, 0, replicaId * 10],
/// scale: [scale, scale, 0],
/// }
/// }
/// // Each layer is just a pretty thin cylinder.
/// fn layer = () => {
/// return startSketchOn("XY") // or some other plane idk
/// |> circle([0, 0], 1, %, 'tag1')
/// |> extrude(h, %)
/// }
/// // The vase is 100 layers tall.
/// // The 100 layers are replicas of each other, with a slight transformation applied to each.
/// let vase = layer() |> patternTransform(100, transform, %)
/// ```
#[stdlib {
name = "patternTransform",
}]
async fn inner_pattern_transform<'a>(
num_repetitions: u32,
transform_function: FunctionParam<'a>,
extrude_group_set: ExtrudeGroupSet,
args: &'a Args,
) -> Result<Vec<Box<ExtrudeGroup>>, KclError> {
// Build the vec of transforms, one for each repetition.
let mut transform = Vec::new();
for i in 0..num_repetitions {
let t = make_transform(i, &transform_function, args.source_range).await?;
transform.push(t);
}
// Flush the batch for our fillets/chamfers if there are any.
// If we do not flush these, then you won't be able to pattern something with fillets.
// Flush just the fillets/chamfers that apply to these extrude groups.
args.flush_batch_for_extrude_group_set(extrude_group_set.clone().into())
.await?;
let starting_extrude_groups: Vec<Box<ExtrudeGroup>> = extrude_group_set.into();
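// Mock executions don't talk to the engine, so just return the input extrude groups unchanged.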
if args.ctx.is_mock {
return Ok(starting_extrude_groups);
}
let mut extrude_groups = Vec::new();
for e in starting_extrude_groups {
let new_extrude_groups = send_pattern_transform(transform.clone(), &e, args).await?;
extrude_groups.extend(new_extrude_groups);
}
Ok(extrude_groups)
}
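/// Send an EntityLinearPatternTransform command to the engine and return the original
/// extrude group followed by one clone per entity ID in the response.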
async fn send_pattern_transform(
// This should be passed via reference, see
// https://github.com/KittyCAD/modeling-app/issues/2821
transform: Vec<kittycad::types::LinearTransform>,
extrude_group: &ExtrudeGroup,
args: &Args,
) -> Result<Vec<Box<ExtrudeGroup>>, KclError> {
let id = uuid::Uuid::new_v4();
let resp = args
.send_modeling_cmd(
id,
ModelingCmd::EntityLinearPatternTransform {
entity_id: extrude_group.id,
transform,
},
)
.await?;
let kittycad::types::OkWebSocketResponseData::Modeling {
modeling_response: kittycad::types::OkModelingCmdResponse::EntityLinearPatternTransform { data: pattern_info },
} = &resp
else {
return Err(KclError::Engine(KclErrorDetails {
message: format!("EntityLinearPattern response was not as expected: {:?}", resp),
source_ranges: vec![args.source_range],
}));
};
let mut geometries = vec![Box::new(extrude_group.clone())];
for id in pattern_info.entity_ids.iter() {
let mut new_extrude_group = extrude_group.clone();
new_extrude_group.id = *id;
geometries.push(Box::new(new_extrude_group));
}
Ok(geometries)
}
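/// Call the user's transform function for repetition `i` and convert the object it
/// returns into a kittycad LinearTransform, applying defaults for any missing fields.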
async fn make_transform<'a>(
i: u32,
transform_function: &FunctionParam<'a>,
source_range: SourceRange,
) -> Result<kittycad::types::LinearTransform, KclError> {
// Call the transform fn for this repetition.
let repetition_num = MemoryItem::UserVal(UserVal {
value: serde_json::Value::Number(i.into()),
meta: vec![source_range.into()],
});
let transform_fn_args = vec![repetition_num];
let transform_fn_return = transform_function.call(transform_fn_args).await?.0;
// Unpack the returned transform object.
let source_ranges = vec![source_range];
let transform_fn_return = transform_fn_return.ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: "Transform function must return a value".to_string(),
source_ranges: source_ranges.clone(),
})
})?;
let ProgramReturn::Value(transform_fn_return) = transform_fn_return else {
return Err(KclError::Semantic(KclErrorDetails {
message: "Transform function must return a value".to_string(),
source_ranges: source_ranges.clone(),
}));
};
let MemoryItem::UserVal(transform) = transform_fn_return else {
return Err(KclError::Semantic(KclErrorDetails {
message: "Transform function must return a transform object".to_string(),
source_ranges: source_ranges.clone(),
}));
};
// Apply defaults to the transform.
let replicate = match transform.value.get("replicate") {
Some(serde_json::Value::Bool(true)) => true,
Some(serde_json::Value::Bool(false)) => false,
Some(_) => {
return Err(KclError::Semantic(KclErrorDetails {
message: "The 'replicate' key must be a bool".to_string(),
source_ranges: source_ranges.clone(),
}));
}
None => true,
};
let scale = match transform.value.get("scale") {
Some(x) => array_to_point3d(x, source_ranges.clone())?,
None => Point3d { x: 1.0, y: 1.0, z: 1.0 },
};
let translate = match transform.value.get("translate") {
Some(x) => array_to_point3d(x, source_ranges.clone())?,
None => Point3d { x: 0.0, y: 0.0, z: 0.0 },
};
let t = kittycad::types::LinearTransform {
replicate,
scale: Some(scale.into()),
translate: Some(translate.into()),
};
Ok(t)
}
fn array_to_point3d(json: &serde_json::Value, source_ranges: Vec<SourceRange>) -> Result<Point3d, KclError> {
let serde_json::Value::Array(arr) = json else {
return Err(KclError::Semantic(KclErrorDetails {
message: "Expected an array of 3 numbers (i.e. a 3D point)".to_string(),
source_ranges,
}));
};
let len = arr.len();
if len != 3 {
return Err(KclError::Semantic(KclErrorDetails {
message: format!("Expected an array of 3 numbers (i.e. a 3D point) but found {len} items"),
source_ranges,
}));
};
// Gets an f64 from a JSON value, returns Option.
let f = |j: &serde_json::Value| j.as_f64();
let err = |component| {
KclError::Semantic(KclErrorDetails {
message: format!("{component} component of this point was not a number"),
source_ranges: source_ranges.clone(),
})
};
let x = f(&arr[0]).ok_or_else(|| err("X"))?;
let y = f(&arr[1]).ok_or_else(|| err("Y"))?;
let z = f(&arr[2]).ok_or_else(|| err("Z"))?;
Ok(Point3d { x, y, z })
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_array_to_point3d() {
let input = serde_json::json! {
[1.1, 2.2, 3.3]
};
let expected = Point3d { x: 1.1, y: 2.2, z: 3.3 };
let actual = array_to_point3d(&input, Vec::new());
assert_eq!(actual.unwrap(), expected);
}
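// Hypothetical extra test (not part of this change set): a non-length-3 array
// should be rejected with a semantic error rather than silently padded.
#[test]
fn test_array_to_point3d_wrong_len() {
let input = serde_json::json! {
[1.1, 2.2]
};
let actual = array_to_point3d(&input, Vec::new());
assert!(actual.is_err());
}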
}
/// A linear pattern on a 2D sketch.
pub async fn pattern_linear_2d(args: Args) -> Result<MemoryItem, KclError> {
let (data, sketch_group_set): (LinearPattern2dData, SketchGroupSet) = args.get_data_and_sketch_group_set()?;

View File

@@ -431,7 +431,7 @@ mod tests {
);
if let Err(err) = result {
assert!(err.to_string().contains( "Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
assert!(err.to_string().contains("Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
} else {
panic!("Expected error");
}

Binary file not shown (added image, 333 KiB)

View File

@@ -53,7 +53,7 @@ pub async fn execute_wasm(
is_mock,
};
let memory = ctx.run(program, Some(memory)).await.map_err(String::from)?;
let memory = ctx.run(&program, Some(memory)).await.map_err(String::from)?;
// The serde-wasm-bindgen crate does not work here because of weird HashMap issues, so we use the
// gloo-serialize crate instead.
JsValue::from_serde(&memory).map_err(|e| e.to_string())
@@ -297,7 +297,6 @@ pub async fn kcl_lsp_run(
executor_ctx: Arc::new(tokio::sync::RwLock::new(executor_ctx)),
is_initialized: Default::default(),
current_handle: Default::default(),
})
.custom_method("kcl/updateUnits", kcl_lib::lsp::kcl::Backend::update_units)
.custom_method("kcl/updateCanExecute", kcl_lib::lsp::kcl::Backend::update_can_execute)
@@ -356,7 +355,7 @@ pub async fn copilot_lsp_run(config: ServerConfig, token: String, baseurl: Strin
zoo_client,
is_initialized: Default::default(),
current_handle: Default::default(),
diagnostics_map: Default::default(),
})
.custom_method("copilot/setEditorInfo", kcl_lib::lsp::copilot::Backend::set_editor_info)
.custom_method(

View File

@@ -0,0 +1,26 @@
// Parameters
const r = 50 // base radius
const h = 10 // layer height
const t = 0.005 // taper factor [0-1)
// Defines how to modify each layer of the vase.
// Each replica is shifted up the Z axis, and has a smoothly-varying radius
fn transform = (replicaId) => {
let scale = r * abs(1 - (t * replicaId)) * (5 + cos(replicaId / 8))
return {
translate: [0, 0, replicaId * 10],
scale: [scale, scale, 0],
}
}
// Each layer is just a pretty thin cylinder with a fillet.
fn layer = () => {
return startSketchOn("XY") // or some other plane idk
|> circle([0, 0], 1, %, 'tag1')
|> extrude(h, %)
// |> fillet({
// radius: h / 2.01,
// tags: ["tag1", getOppositeEdge("tag1", %)]
// }, %)
}
// The vase is 100 layers tall.
// The 100 layers are replicas of each other, with a slight transformation applied to each.
let vase = layer() |> patternTransform(100, transform, %)

View File

@@ -51,7 +51,7 @@ async fn execute_and_snapshot(code: &str, units: UnitLength) -> Result<image::Dy
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let snapshot = ctx.execute_and_prepare_snapshot(program).await?;
let snapshot = ctx.execute_and_prepare_snapshot(&program).await?;
// Create a temporary file to write the output to.
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
@@ -2464,3 +2464,10 @@ async fn serial_test_global_tags() {
let result = execute_and_snapshot(code, UnitLength::Mm).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/global_tags.png", &result, 0.999);
}
#[tokio::test(flavor = "multi_thread")]
async fn serial_test_pattern_vase() {
let code = include_str!("inputs/pattern_vase.kcl");
let result = execute_and_snapshot(code, UnitLength::Mm).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/pattern_vase.png", &result, 0.999);
}

Binary file not shown (added image, 333 KiB)

View File

@@ -35,7 +35,7 @@ async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Program, uuid
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let ctx = kcl_lib::executor::ExecutorContext::new(&client, Default::default()).await?;
let memory = ctx.run(program.clone(), None).await?;
let memory = ctx.run(&program, None).await?;
// We need to get the sketch ID.
// Get the sketch group ID from memory.