Compare commits
5 Commits
remove-the
...
refactor-l
Author | SHA1 | Date | |
---|---|---|---|
2363010cbc | |||
bd17bc98d5 | |||
e7a2824cb2 | |||
f77ed3d790 | |||
bc5fdbad43 |
4
.github/workflows/cargo-bench.yml
vendored
4
.github/workflows/cargo-bench.yml
vendored
@ -38,7 +38,5 @@ jobs:
|
|||||||
- name: Benchmark kcl library
|
- name: Benchmark kcl library
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |-
|
run: |-
|
||||||
cd src/wasm-lib/kcl; cargo bench --all-features -- iai
|
cd src/wasm-lib/kcl; cargo bench -- iai
|
||||||
env:
|
|
||||||
KITTYCAD_API_TOKEN: ${{secrets.KITTYCAD_API_TOKEN}}
|
|
||||||
|
|
||||||
|
27
.github/workflows/playwright.yml
vendored
27
.github/workflows/playwright.yml
vendored
@ -38,8 +38,6 @@ jobs:
|
|||||||
runs-on: ubuntu-latest-8-cores
|
runs-on: ubuntu-latest-8-cores
|
||||||
needs: check-rust-changes
|
needs: check-rust-changes
|
||||||
steps:
|
steps:
|
||||||
- name: Tune GitHub-hosted runner network
|
|
||||||
uses: smorimoto/tune-github-hosted-runner-network@v1
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
@ -92,17 +90,14 @@ jobs:
|
|||||||
- name: build web
|
- name: build web
|
||||||
run: yarn build:local
|
run: yarn build:local
|
||||||
- name: Run ubuntu/chrome snapshots
|
- name: Run ubuntu/chrome snapshots
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
run: |
|
||||||
yarn playwright test --project="Google Chrome" --update-snapshots e2e/playwright/snapshot-tests.spec.ts
|
yarn playwright test --project="Google Chrome" --update-snapshots e2e/playwright/snapshot-tests.spec.ts
|
||||||
|
# remove test-results, messes with retry logic
|
||||||
|
rm -r test-results
|
||||||
env:
|
env:
|
||||||
CI: true
|
CI: true
|
||||||
token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
||||||
snapshottoken: ${{ secrets.KITTYCAD_API_TOKEN }}
|
snapshottoken: ${{ secrets.KITTYCAD_API_TOKEN }}
|
||||||
- name: Clean up test-results
|
|
||||||
if: always()
|
|
||||||
continue-on-error: true
|
|
||||||
run: rm -r test-results
|
|
||||||
- name: check for changes
|
- name: check for changes
|
||||||
id: git-check
|
id: git-check
|
||||||
run: |
|
run: |
|
||||||
@ -129,7 +124,7 @@ jobs:
|
|||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v4
|
||||||
if: steps.git-check.outputs.modified == 'true'
|
if: steps.git-check.outputs.modified == 'true'
|
||||||
with:
|
with:
|
||||||
name: playwright-report-ubuntu-${{ github.sha }}
|
name: playwright-report-ubuntu
|
||||||
path: playwright-report/
|
path: playwright-report/
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
# if have previous run results, use them
|
# if have previous run results, use them
|
||||||
@ -137,7 +132,7 @@ jobs:
|
|||||||
if: always()
|
if: always()
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
name: test-results-ubuntu-${{ github.sha }}
|
name: test-results-ubuntu
|
||||||
path: test-results/
|
path: test-results/
|
||||||
- name: Run ubuntu/chrome flow retry failures
|
- name: Run ubuntu/chrome flow retry failures
|
||||||
id: retry
|
id: retry
|
||||||
@ -163,25 +158,23 @@ jobs:
|
|||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v4
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
name: test-results-ubuntu-${{ github.sha }}
|
name: test-results-ubuntu
|
||||||
path: test-results/
|
path: test-results/
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
overwrite: true
|
overwrite: true
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v4
|
||||||
if: always()
|
if: always()
|
||||||
with:
|
with:
|
||||||
name: playwright-report-ubuntu-${{ github.sha }}
|
name: playwright-report-ubuntu
|
||||||
path: playwright-report/
|
path: playwright-report/
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
overwrite: true
|
overwrite: true
|
||||||
|
|
||||||
playwright-macos:
|
playwright-macos:
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
runs-on: macos-14-large
|
runs-on: macos-14
|
||||||
needs: check-rust-changes
|
needs: check-rust-changes
|
||||||
steps:
|
steps:
|
||||||
- name: Tune GitHub-hosted runner network
|
|
||||||
uses: smorimoto/tune-github-hosted-runner-network@v1
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-node@v4
|
- uses: actions/setup-node@v4
|
||||||
with:
|
with:
|
||||||
@ -239,7 +232,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
with:
|
with:
|
||||||
name: test-results-macos-${{ github.sha }}
|
name: test-results-macos
|
||||||
path: test-results/
|
path: test-results/
|
||||||
- name: Run macos/safari flow retry failures
|
- name: Run macos/safari flow retry failures
|
||||||
id: retry
|
id: retry
|
||||||
@ -267,14 +260,14 @@ jobs:
|
|||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: test-results-macos-${{ github.sha }}
|
name: test-results-macos
|
||||||
path: test-results/
|
path: test-results/
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
overwrite: true
|
overwrite: true
|
||||||
- uses: actions/upload-artifact@v4
|
- uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: playwright-report-macos-${{ github.sha }}
|
name: playwright-report-macos
|
||||||
path: playwright-report/
|
path: playwright-report/
|
||||||
retention-days: 30
|
retention-days: 30
|
||||||
overwrite: true
|
overwrite: true
|
||||||
|
@ -55,7 +55,6 @@ layout: manual
|
|||||||
* [`patternCircular3d`](kcl/patternCircular3d)
|
* [`patternCircular3d`](kcl/patternCircular3d)
|
||||||
* [`patternLinear2d`](kcl/patternLinear2d)
|
* [`patternLinear2d`](kcl/patternLinear2d)
|
||||||
* [`patternLinear3d`](kcl/patternLinear3d)
|
* [`patternLinear3d`](kcl/patternLinear3d)
|
||||||
* [`patternTransform`](kcl/patternTransform)
|
|
||||||
* [`pi`](kcl/pi)
|
* [`pi`](kcl/pi)
|
||||||
* [`pow`](kcl/pow)
|
* [`pow`](kcl/pow)
|
||||||
* [`profileStart`](kcl/profileStart)
|
* [`profileStart`](kcl/profileStart)
|
||||||
|
File diff suppressed because one or more lines are too long
4230
docs/kcl/std.json
4230
docs/kcl/std.json
File diff suppressed because it is too large
Load Diff
@ -1214,18 +1214,12 @@ test('Auto complete works', async ({ page }) => {
|
|||||||
await page.waitForTimeout(100)
|
await page.waitForTimeout(100)
|
||||||
// press arrow down twice then enter to accept xLine
|
// press arrow down twice then enter to accept xLine
|
||||||
await page.keyboard.press('ArrowDown')
|
await page.keyboard.press('ArrowDown')
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.keyboard.press('ArrowDown')
|
await page.keyboard.press('ArrowDown')
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.keyboard.press('Enter')
|
await page.keyboard.press('Enter')
|
||||||
await page.waitForTimeout(100)
|
|
||||||
// finish line with comment
|
// finish line with comment
|
||||||
await page.keyboard.type('5')
|
await page.keyboard.type('5')
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.keyboard.press('Tab')
|
await page.keyboard.press('Tab')
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.keyboard.press('Tab')
|
await page.keyboard.press('Tab')
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.keyboard.type(' // lin')
|
await page.keyboard.type(' // lin')
|
||||||
await page.waitForTimeout(100)
|
await page.waitForTimeout(100)
|
||||||
// there shouldn't be any auto complete options for 'lin' in the comment
|
// there shouldn't be any auto complete options for 'lin' in the comment
|
||||||
@ -1695,7 +1689,6 @@ test.describe('Onboarding tests', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
test.describe('Testing selections', () => {
|
test.describe('Testing selections', () => {
|
||||||
test.setTimeout(90_000)
|
|
||||||
test('Selections work on fresh and edited sketch', async ({ page }) => {
|
test('Selections work on fresh and edited sketch', async ({ page }) => {
|
||||||
// tests mapping works on fresh sketch and edited sketch
|
// tests mapping works on fresh sketch and edited sketch
|
||||||
// tests using hovers which is the same as selections, because if
|
// tests using hovers which is the same as selections, because if
|
||||||
@ -1901,239 +1894,6 @@ test.describe('Testing selections', () => {
|
|||||||
await selectionSequence()
|
await selectionSequence()
|
||||||
})
|
})
|
||||||
|
|
||||||
test('Solids should be select and deletable', async ({ page }) => {
|
|
||||||
test.setTimeout(90_000)
|
|
||||||
const u = await getUtils(page)
|
|
||||||
await page.addInitScript(async () => {
|
|
||||||
localStorage.setItem(
|
|
||||||
'persistCode',
|
|
||||||
`const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([-79.26, 95.04], %)
|
|
||||||
|> line([112.54, 127.64], %, $seg02)
|
|
||||||
|> line([170.36, -121.61], %, $seg01)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude001 = extrude(50, sketch001)
|
|
||||||
const sketch005 = startSketchOn(extrude001, 'END')
|
|
||||||
|> startProfileAt([23.24, 136.52], %)
|
|
||||||
|> line([-8.44, 36.61], %)
|
|
||||||
|> line([49.4, 2.05], %)
|
|
||||||
|> line([29.69, -46.95], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const sketch003 = startSketchOn(extrude001, seg01)
|
|
||||||
|> startProfileAt([21.23, 17.81], %)
|
|
||||||
|> line([51.97, 21.32], %)
|
|
||||||
|> line([4.07, -22.75], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const sketch002 = startSketchOn(extrude001, seg02)
|
|
||||||
|> startProfileAt([-100.54, 16.99], %)
|
|
||||||
|> line([0, 20.03], %)
|
|
||||||
|> line([62.61, 0], %, $seg03)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude002 = extrude(50, sketch002)
|
|
||||||
const sketch004 = startSketchOn(extrude002, seg03)
|
|
||||||
|> startProfileAt([57.07, 134.77], %)
|
|
||||||
|> line([-4.72, 22.84], %)
|
|
||||||
|> line([28.8, 6.71], %)
|
|
||||||
|> line([9.19, -25.33], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude003 = extrude(20, sketch004)
|
|
||||||
const pipeLength = 40
|
|
||||||
const pipeSmallDia = 10
|
|
||||||
const pipeLargeDia = 20
|
|
||||||
const thickness = 0.5
|
|
||||||
const part009 = startSketchOn('XY')
|
|
||||||
|> startProfileAt([pipeLargeDia - (thickness / 2), 38], %)
|
|
||||||
|> line([thickness, 0], %)
|
|
||||||
|> line([0, -1], %)
|
|
||||||
|> angledLineToX({
|
|
||||||
angle: 60,
|
|
||||||
to: pipeSmallDia + thickness
|
|
||||||
}, %)
|
|
||||||
|> line([0, -pipeLength], %)
|
|
||||||
|> angledLineToX({
|
|
||||||
angle: -60,
|
|
||||||
to: pipeLargeDia + thickness
|
|
||||||
}, %)
|
|
||||||
|> line([0, -1], %)
|
|
||||||
|> line([-thickness, 0], %)
|
|
||||||
|> line([0, 1], %)
|
|
||||||
|> angledLineToX({ angle: 120, to: pipeSmallDia }, %)
|
|
||||||
|> line([0, pipeLength], %)
|
|
||||||
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
|
||||||
|> close(%)
|
|
||||||
const rev = revolve({ axis: 'y' }, part009)
|
|
||||||
`
|
|
||||||
)
|
|
||||||
}, KCL_DEFAULT_LENGTH)
|
|
||||||
await page.setViewportSize({ width: 1000, height: 500 })
|
|
||||||
await page.goto('/')
|
|
||||||
await u.waitForAuthSkipAppStart()
|
|
||||||
|
|
||||||
await u.openDebugPanel()
|
|
||||||
await u.expectCmdLog('[data-message-type="execution-done"]')
|
|
||||||
await u.closeDebugPanel()
|
|
||||||
|
|
||||||
await u.openAndClearDebugPanel()
|
|
||||||
await u.sendCustomCmd({
|
|
||||||
type: 'modeling_cmd_req',
|
|
||||||
cmd_id: uuidv4(),
|
|
||||||
cmd: {
|
|
||||||
type: 'default_camera_look_at',
|
|
||||||
vantage: { x: 1139.49, y: -7053, z: 8597.31 },
|
|
||||||
center: { x: -2206.68, y: -1298.36, z: 60 },
|
|
||||||
up: { x: 0, y: 0, z: 1 },
|
|
||||||
},
|
|
||||||
})
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await u.sendCustomCmd({
|
|
||||||
type: 'modeling_cmd_req',
|
|
||||||
cmd_id: uuidv4(),
|
|
||||||
cmd: {
|
|
||||||
type: 'default_camera_get_settings',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
|
|
||||||
const revolve = { x: 646, y: 248 }
|
|
||||||
const parentExtrude = { x: 915, y: 133 }
|
|
||||||
const solid2d = { x: 770, y: 167 }
|
|
||||||
|
|
||||||
// DELETE REVOLVE
|
|
||||||
await page.mouse.click(revolve.x, revolve.y)
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
|
||||||
'|> line([0, -pipeLength], %)'
|
|
||||||
)
|
|
||||||
await u.clearCommandLogs()
|
|
||||||
await page.keyboard.press('Backspace')
|
|
||||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
|
|
||||||
await expect(u.codeLocator).not.toContainText(
|
|
||||||
`const rev = revolve({ axis: 'y' }, part009)`
|
|
||||||
)
|
|
||||||
|
|
||||||
// DELETE PARENT EXTRUDE
|
|
||||||
await page.mouse.click(parentExtrude.x, parentExtrude.y)
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
|
||||||
'|> line([170.36, -121.61], %, $seg01)'
|
|
||||||
)
|
|
||||||
await u.clearCommandLogs()
|
|
||||||
await page.keyboard.press('Backspace')
|
|
||||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await expect(u.codeLocator).not.toContainText(
|
|
||||||
`const extrude001 = extrude(50, sketch001)`
|
|
||||||
)
|
|
||||||
await expect(u.codeLocator).toContainText(`const sketch005 = startSketchOn({
|
|
||||||
plane: {
|
|
||||||
origin: { x: 0, y: -50, z: 0 },
|
|
||||||
x_axis: { x: 1, y: 0, z: 0 },
|
|
||||||
y_axis: { x: 0, y: 0, z: 1 },
|
|
||||||
z_axis: { x: 0, y: -1, z: 0 }
|
|
||||||
}
|
|
||||||
})`)
|
|
||||||
await expect(u.codeLocator).toContainText(`const sketch003 = startSketchOn({
|
|
||||||
plane: {
|
|
||||||
origin: { x: 116.53, y: 0, z: 163.25 },
|
|
||||||
x_axis: { x: -0.81, y: 0, z: 0.58 },
|
|
||||||
y_axis: { x: 0, y: -1, z: 0 },
|
|
||||||
z_axis: { x: 0.58, y: 0, z: 0.81 }
|
|
||||||
}
|
|
||||||
})`)
|
|
||||||
await expect(u.codeLocator).toContainText(`const sketch002 = startSketchOn({
|
|
||||||
plane: {
|
|
||||||
origin: { x: -91.74, y: 0, z: 80.89 },
|
|
||||||
x_axis: { x: -0.66, y: 0, z: -0.75 },
|
|
||||||
y_axis: { x: 0, y: -1, z: 0 },
|
|
||||||
z_axis: { x: -0.75, y: 0, z: 0.66 }
|
|
||||||
}
|
|
||||||
})`)
|
|
||||||
|
|
||||||
// DELETE SOLID 2D
|
|
||||||
await page.mouse.click(solid2d.x, solid2d.y)
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
|
||||||
'|> startProfileAt([23.24, 136.52], %)'
|
|
||||||
)
|
|
||||||
await u.clearCommandLogs()
|
|
||||||
await page.keyboard.press('Backspace')
|
|
||||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await expect(u.codeLocator).not.toContainText(
|
|
||||||
`const sketch005 = startSketchOn({`
|
|
||||||
)
|
|
||||||
})
|
|
||||||
test("Deleting solid that the AST mod can't handle results in a toast message", async ({
|
|
||||||
page,
|
|
||||||
}) => {
|
|
||||||
const u = await getUtils(page)
|
|
||||||
await page.addInitScript(async () => {
|
|
||||||
localStorage.setItem(
|
|
||||||
'persistCode',
|
|
||||||
`const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([-79.26, 95.04], %)
|
|
||||||
|> line([112.54, 127.64], %, $seg02)
|
|
||||||
|> line([170.36, -121.61], %, $seg01)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude001 = extrude(50, sketch001)
|
|
||||||
const launderExtrudeThroughVar = extrude001
|
|
||||||
const sketch002 = startSketchOn(launderExtrudeThroughVar, seg02)
|
|
||||||
|> startProfileAt([-100.54, 16.99], %)
|
|
||||||
|> line([0, 20.03], %)
|
|
||||||
|> line([62.61, 0], %, $seg03)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
`
|
|
||||||
)
|
|
||||||
}, KCL_DEFAULT_LENGTH)
|
|
||||||
await page.setViewportSize({ width: 1000, height: 500 })
|
|
||||||
await page.goto('/')
|
|
||||||
await u.waitForAuthSkipAppStart()
|
|
||||||
|
|
||||||
await u.openDebugPanel()
|
|
||||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
|
||||||
await u.closeDebugPanel()
|
|
||||||
|
|
||||||
await u.openAndClearDebugPanel()
|
|
||||||
await u.sendCustomCmd({
|
|
||||||
type: 'modeling_cmd_req',
|
|
||||||
cmd_id: uuidv4(),
|
|
||||||
cmd: {
|
|
||||||
type: 'default_camera_look_at',
|
|
||||||
vantage: { x: 1139.49, y: -7053, z: 8597.31 },
|
|
||||||
center: { x: -2206.68, y: -1298.36, z: 60 },
|
|
||||||
up: { x: 0, y: 0, z: 1 },
|
|
||||||
},
|
|
||||||
})
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await u.sendCustomCmd({
|
|
||||||
type: 'modeling_cmd_req',
|
|
||||||
cmd_id: uuidv4(),
|
|
||||||
cmd: {
|
|
||||||
type: 'default_camera_get_settings',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
|
|
||||||
// attempt delete
|
|
||||||
await page.mouse.click(930, 139)
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
|
||||||
'|> line([170.36, -121.61], %, $seg01)'
|
|
||||||
)
|
|
||||||
await u.clearCommandLogs()
|
|
||||||
await page.keyboard.press('Backspace')
|
|
||||||
|
|
||||||
await expect(page.getByText('Unable to delete part')).toBeVisible()
|
|
||||||
})
|
|
||||||
test('Hovering over 3d features highlights code', async ({ page }) => {
|
test('Hovering over 3d features highlights code', async ({ page }) => {
|
||||||
const u = await getUtils(page)
|
const u = await getUtils(page)
|
||||||
await page.addInitScript(async (KCL_DEFAULT_LENGTH) => {
|
await page.addInitScript(async (KCL_DEFAULT_LENGTH) => {
|
||||||
@ -2361,104 +2121,6 @@ const part001 = startSketchOn('XZ')
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
test("Hovering and selection of extruded faces works, and is not overridden shortly after user's click", async ({
|
|
||||||
page,
|
|
||||||
}) => {
|
|
||||||
await page.addInitScript(async () => {
|
|
||||||
localStorage.setItem(
|
|
||||||
'persistCode',
|
|
||||||
`const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([-79.26, 95.04], %)
|
|
||||||
|> line([112.54, 127.64], %)
|
|
||||||
|> line([170.36, -121.61], %, $seg01)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude001 = extrude(50, sketch001)
|
|
||||||
`
|
|
||||||
)
|
|
||||||
})
|
|
||||||
const u = await getUtils(page)
|
|
||||||
await page.setViewportSize({ width: 1200, height: 500 })
|
|
||||||
await page.goto('/')
|
|
||||||
await u.waitForAuthSkipAppStart()
|
|
||||||
await u.openAndClearDebugPanel()
|
|
||||||
|
|
||||||
await u.sendCustomCmd({
|
|
||||||
type: 'modeling_cmd_req',
|
|
||||||
cmd_id: uuidv4(),
|
|
||||||
cmd: {
|
|
||||||
type: 'default_camera_look_at',
|
|
||||||
vantage: { x: 6615, y: -9505, z: 10344 },
|
|
||||||
center: { x: 1579, y: -635, z: 4035 },
|
|
||||||
up: { x: 0, y: 0, z: 1 },
|
|
||||||
},
|
|
||||||
})
|
|
||||||
await u.waitForCmdReceive('default_camera_look_at')
|
|
||||||
await u.clearAndCloseDebugPanel()
|
|
||||||
|
|
||||||
await page.waitForTimeout(1000)
|
|
||||||
|
|
||||||
const isMac = process.platform === 'darwin'
|
|
||||||
|
|
||||||
let noHoverColor: [number, number, number] = [82, 82, 82]
|
|
||||||
let hoverColor: [number, number, number] = [116, 116, 116]
|
|
||||||
let selectColor: [number, number, number] = [144, 148, 97]
|
|
||||||
|
|
||||||
const extrudeWall = { x: 670, y: 275 }
|
|
||||||
const extrudeText = `line([170.36, -121.61], %, $seg01)`
|
|
||||||
|
|
||||||
const cap = { x: 594, y: 283 }
|
|
||||||
const capText = `startProfileAt([-79.26, 95.04], %)`
|
|
||||||
|
|
||||||
const nothing = { x: 946, y: 229 }
|
|
||||||
|
|
||||||
expect(await u.getGreatestPixDiff(extrudeWall, noHoverColor)).toBeLessThan(
|
|
||||||
5
|
|
||||||
)
|
|
||||||
await page.mouse.move(nothing.x, nothing.y)
|
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.mouse.move(extrudeWall.x, extrudeWall.y)
|
|
||||||
await expect(page.getByTestId('hover-highlight')).toBeVisible()
|
|
||||||
await expect(page.getByTestId('hover-highlight')).toContainText(extrudeText)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await expect(
|
|
||||||
await u.getGreatestPixDiff(extrudeWall, hoverColor)
|
|
||||||
).toBeLessThan(5)
|
|
||||||
await page.mouse.click(extrudeWall.x, extrudeWall.y)
|
|
||||||
await expect(page.locator('.cm-activeLine')).toHaveText(`|> ${extrudeText}`)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await expect(
|
|
||||||
await u.getGreatestPixDiff(extrudeWall, selectColor)
|
|
||||||
).toBeLessThan(5)
|
|
||||||
await page.waitForTimeout(1000)
|
|
||||||
// check color stays there, i.e. not overridden (this was a bug previously)
|
|
||||||
await expect(
|
|
||||||
await u.getGreatestPixDiff(extrudeWall, selectColor)
|
|
||||||
).toBeLessThan(5)
|
|
||||||
|
|
||||||
await page.mouse.move(nothing.x, nothing.y)
|
|
||||||
await page.waitForTimeout(300)
|
|
||||||
await expect(page.getByTestId('hover-highlight')).not.toBeVisible()
|
|
||||||
|
|
||||||
// because of shading, color is not exact everywhere on the face
|
|
||||||
noHoverColor = [104, 104, 104]
|
|
||||||
hoverColor = [134, 134, 134]
|
|
||||||
selectColor = [158, 162, 110]
|
|
||||||
|
|
||||||
await expect(await u.getGreatestPixDiff(cap, noHoverColor)).toBeLessThan(5)
|
|
||||||
await page.mouse.move(cap.x, cap.y)
|
|
||||||
await expect(page.getByTestId('hover-highlight')).toBeVisible()
|
|
||||||
await expect(page.getByTestId('hover-highlight')).toContainText(capText)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await expect(await u.getGreatestPixDiff(cap, hoverColor)).toBeLessThan(5)
|
|
||||||
await page.mouse.click(cap.x, cap.y)
|
|
||||||
await expect(page.locator('.cm-activeLine')).toHaveText(`|> ${capText}`)
|
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await expect(await u.getGreatestPixDiff(cap, selectColor)).toBeLessThan(5)
|
|
||||||
await page.waitForTimeout(1000)
|
|
||||||
// check color stays there, i.e. not overridden (this was a bug previously)
|
|
||||||
await expect(await u.getGreatestPixDiff(cap, selectColor)).toBeLessThan(5)
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test.describe('Command bar tests', () => {
|
test.describe('Command bar tests', () => {
|
||||||
@ -2477,10 +2139,10 @@ test.describe('Command bar tests', () => {
|
|||||||
.or(page.getByRole('button', { name: '⌘K' }))
|
.or(page.getByRole('button', { name: '⌘K' }))
|
||||||
.click()
|
.click()
|
||||||
|
|
||||||
let cmdSearchBar = page.getByPlaceholder('Search commands')
|
let cmdSearchBar = await page.getByPlaceholder('Search commands')
|
||||||
await expect(cmdSearchBar).toBeVisible()
|
await expect(cmdSearchBar).toBeVisible()
|
||||||
await page.keyboard.press('Escape')
|
await page.keyboard.press('Escape')
|
||||||
cmdSearchBar = page.getByPlaceholder('Search commands')
|
cmdSearchBar = await page.getByPlaceholder('Search commands')
|
||||||
await expect(cmdSearchBar).not.toBeVisible()
|
await expect(cmdSearchBar).not.toBeVisible()
|
||||||
|
|
||||||
// Now try the same, but with the keyboard shortcut, check focus
|
// Now try the same, but with the keyboard shortcut, check focus
|
||||||
@ -2489,7 +2151,7 @@ test.describe('Command bar tests', () => {
|
|||||||
} else {
|
} else {
|
||||||
await page.locator('html').press('Control+C')
|
await page.locator('html').press('Control+C')
|
||||||
}
|
}
|
||||||
cmdSearchBar = page.getByPlaceholder('Search commands')
|
cmdSearchBar = await page.getByPlaceholder('Search commands')
|
||||||
await expect(cmdSearchBar).toBeVisible()
|
await expect(cmdSearchBar).toBeVisible()
|
||||||
await expect(cmdSearchBar).toBeFocused()
|
await expect(cmdSearchBar).toBeFocused()
|
||||||
|
|
||||||
@ -2870,6 +2532,9 @@ fn yohey = (pos) => {
|
|||||||
|
|
||||||
await page.getByText(selectionsSnippets.extrudeAndEditBlocked).click()
|
await page.getByText(selectionsSnippets.extrudeAndEditBlocked).click()
|
||||||
await expect(page.getByRole('button', { name: 'Extrude' })).toBeDisabled()
|
await expect(page.getByRole('button', { name: 'Extrude' })).toBeDisabled()
|
||||||
|
await expect(
|
||||||
|
page.getByRole('button', { name: 'Edit Sketch' })
|
||||||
|
).not.toBeVisible()
|
||||||
|
|
||||||
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
|
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
|
||||||
await expect(page.getByRole('button', { name: 'Extrude' })).not.toBeDisabled()
|
await expect(page.getByRole('button', { name: 'Extrude' })).not.toBeDisabled()
|
||||||
@ -2894,14 +2559,10 @@ fn yohey = (pos) => {
|
|||||||
// selecting an editable sketch but clicking "start sketch" should start a new sketch and not edit the existing one
|
// selecting an editable sketch but clicking "start sketch" should start a new sketch and not edit the existing one
|
||||||
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
|
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
|
||||||
await page.getByRole('button', { name: 'Start Sketch' }).click()
|
await page.getByRole('button', { name: 'Start Sketch' }).click()
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await page.getByTestId('KCL Code').click()
|
await page.getByTestId('KCL Code').click()
|
||||||
await page.waitForTimeout(200)
|
|
||||||
await page.mouse.click(734, 134)
|
await page.mouse.click(734, 134)
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.getByTestId('KCL Code').click()
|
await page.getByTestId('KCL Code').click()
|
||||||
// expect main content to contain `sketch005` i.e. started a new sketch
|
// expect main content to contain `sketch005` i.e. started a new sketch
|
||||||
await page.waitForTimeout(300)
|
|
||||||
await expect(page.locator('.cm-content')).toHaveText(
|
await expect(page.locator('.cm-content')).toHaveText(
|
||||||
/sketch001 = startSketchOn\('XZ'\)/
|
/sketch001 = startSketchOn\('XZ'\)/
|
||||||
)
|
)
|
||||||
@ -3185,7 +2846,7 @@ async function doEditSegmentsByDraggingHandle(page: Page, openPanes: string[]) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
test.describe('Can edit segments by dragging their handles', () => {
|
test.describe('Can edit segments by dragging their handles', () => {
|
||||||
test('code pane open at start-handles', async ({ page }) => {
|
test('code pane open at start', async ({ page }) => {
|
||||||
// Load the app with the code panes
|
// Load the app with the code panes
|
||||||
await page.addInitScript(async () => {
|
await page.addInitScript(async () => {
|
||||||
localStorage.setItem(
|
localStorage.setItem(
|
||||||
@ -3201,7 +2862,7 @@ test.describe('Can edit segments by dragging their handles', () => {
|
|||||||
await doEditSegmentsByDraggingHandle(page, ['code'])
|
await doEditSegmentsByDraggingHandle(page, ['code'])
|
||||||
})
|
})
|
||||||
|
|
||||||
test('code pane closed at start-handles', async ({ page }) => {
|
test('code pane closed at start', async ({ page }) => {
|
||||||
// Load the app with the code panes
|
// Load the app with the code panes
|
||||||
await page.addInitScript(async () => {
|
await page.addInitScript(async () => {
|
||||||
localStorage.setItem(
|
localStorage.setItem(
|
||||||
@ -3519,7 +3180,6 @@ test.describe('Snap to close works (at any scale)', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
test('Sketch on face', async ({ page }) => {
|
test('Sketch on face', async ({ page }) => {
|
||||||
test.setTimeout(90_000)
|
|
||||||
const u = await getUtils(page)
|
const u = await getUtils(page)
|
||||||
await page.addInitScript(async () => {
|
await page.addInitScript(async () => {
|
||||||
localStorage.setItem(
|
localStorage.setItem(
|
||||||
@ -5713,7 +5373,6 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
|||||||
)
|
)
|
||||||
|
|
||||||
await page.getByTestId('overlay-menu').click()
|
await page.getByTestId('overlay-menu').click()
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.getByText('Delete Segment').click()
|
await page.getByText('Delete Segment').click()
|
||||||
|
|
||||||
await page.getByText('Cancel').click()
|
await page.getByText('Cancel').click()
|
||||||
@ -5726,7 +5385,6 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
|||||||
)
|
)
|
||||||
|
|
||||||
await page.getByTestId('overlay-menu').click()
|
await page.getByTestId('overlay-menu').click()
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.getByText('Delete Segment').click()
|
await page.getByText('Delete Segment').click()
|
||||||
|
|
||||||
await page.getByText('Continue and unconstrain').last().click()
|
await page.getByText('Continue and unconstrain').last().click()
|
||||||
@ -5875,7 +5533,6 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
|||||||
await expect(page.locator('.cm-content')).toContainText(before)
|
await expect(page.locator('.cm-content')).toContainText(before)
|
||||||
|
|
||||||
await page.getByTestId('overlay-menu').click()
|
await page.getByTestId('overlay-menu').click()
|
||||||
await page.waitForTimeout(100)
|
|
||||||
await page.getByText('Remove constraints').click()
|
await page.getByText('Remove constraints').click()
|
||||||
|
|
||||||
await expect(page.locator('.cm-content')).toContainText(after)
|
await expect(page.locator('.cm-content')).toContainText(after)
|
||||||
@ -6049,8 +5706,8 @@ test('Basic default modeling and sketch hotkeys work', async ({ page }) => {
|
|||||||
await expect(extrudeButton).not.toBeDisabled()
|
await expect(extrudeButton).not.toBeDisabled()
|
||||||
await page.keyboard.press('e')
|
await page.keyboard.press('e')
|
||||||
await page.waitForTimeout(100)
|
await page.waitForTimeout(100)
|
||||||
await page.mouse.move(800, 200, { steps: 5 })
|
await page.mouse.move(900, 200, { steps: 5 })
|
||||||
await page.mouse.click(800, 200)
|
await page.mouse.click(900, 200)
|
||||||
await page.waitForTimeout(100)
|
await page.waitForTimeout(100)
|
||||||
await page.getByRole('button', { name: 'Continue' }).click()
|
await page.getByRole('button', { name: 'Continue' }).click()
|
||||||
await page.getByRole('button', { name: 'Submit command' }).click()
|
await page.getByRole('button', { name: 'Submit command' }).click()
|
||||||
|
@ -45,8 +45,8 @@ async function clearCommandLogs(page: Page) {
|
|||||||
await page.getByTestId('clear-commands').click()
|
await page.getByTestId('clear-commands').click()
|
||||||
}
|
}
|
||||||
|
|
||||||
async function expectCmdLog(page: Page, locatorStr: string, timeout = 5000) {
|
async function expectCmdLog(page: Page, locatorStr: string) {
|
||||||
await expect(page.locator(locatorStr).last()).toBeVisible({ timeout })
|
await expect(page.locator(locatorStr).last()).toBeVisible()
|
||||||
}
|
}
|
||||||
|
|
||||||
async function waitForDefaultPlanesToBeVisible(page: Page) {
|
async function waitForDefaultPlanesToBeVisible(page: Page) {
|
||||||
@ -228,8 +228,7 @@ export async function getUtils(page: Page) {
|
|||||||
await fillInput('z', xyz[2])
|
await fillInput('z', xyz[2])
|
||||||
},
|
},
|
||||||
clearCommandLogs: () => clearCommandLogs(page),
|
clearCommandLogs: () => clearCommandLogs(page),
|
||||||
expectCmdLog: (locatorStr: string, timeout = 5000) =>
|
expectCmdLog: (locatorStr: string) => expectCmdLog(page, locatorStr),
|
||||||
expectCmdLog(page, locatorStr, timeout),
|
|
||||||
openKclCodePanel: () => openKclCodePanel(page),
|
openKclCodePanel: () => openKclCodePanel(page),
|
||||||
closeKclCodePanel: () => closeKclCodePanel(page),
|
closeKclCodePanel: () => closeKclCodePanel(page),
|
||||||
openDebugPanel: () => openDebugPanel(page),
|
openDebugPanel: () => openDebugPanel(page),
|
||||||
@ -301,19 +300,11 @@ export async function getUtils(page: Page) {
|
|||||||
(screenshot.width * coords.y * pixMultiplier +
|
(screenshot.width * coords.y * pixMultiplier +
|
||||||
coords.x * pixMultiplier) *
|
coords.x * pixMultiplier) *
|
||||||
4 // rbga is 4 channels
|
4 // rbga is 4 channels
|
||||||
const maxDiff = Math.max(
|
return Math.max(
|
||||||
Math.abs(screenshot.data[index] - expected[0]),
|
Math.abs(screenshot.data[index] - expected[0]),
|
||||||
Math.abs(screenshot.data[index + 1] - expected[1]),
|
Math.abs(screenshot.data[index + 1] - expected[1]),
|
||||||
Math.abs(screenshot.data[index + 2] - expected[2])
|
Math.abs(screenshot.data[index + 2] - expected[2])
|
||||||
)
|
)
|
||||||
if (maxDiff > 4) {
|
|
||||||
console.log(
|
|
||||||
`Expected: ${expected} Actual: [${screenshot.data[index]}, ${
|
|
||||||
screenshot.data[index + 1]
|
|
||||||
}, ${screenshot.data[index + 2]}]`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return maxDiff
|
|
||||||
},
|
},
|
||||||
doAndWaitForImageDiff: (fn: () => Promise<any>, diffCount = 200) =>
|
doAndWaitForImageDiff: (fn: () => Promise<any>, diffCount = 200) =>
|
||||||
new Promise(async (resolve) => {
|
new Promise(async (resolve) => {
|
||||||
|
@ -12,7 +12,6 @@
|
|||||||
"@headlessui/tailwindcss": "^0.2.0",
|
"@headlessui/tailwindcss": "^0.2.0",
|
||||||
"@kittycad/lib": "^0.0.67",
|
"@kittycad/lib": "^0.0.67",
|
||||||
"@lezer/javascript": "^1.4.9",
|
"@lezer/javascript": "^1.4.9",
|
||||||
"@open-rpc/client-js": "^1.8.1",
|
|
||||||
"@react-hook/resize-observer": "^2.0.1",
|
"@react-hook/resize-observer": "^2.0.1",
|
||||||
"@replit/codemirror-interact": "^6.3.1",
|
"@replit/codemirror-interact": "^6.3.1",
|
||||||
"@tauri-apps/api": "2.0.0-beta.12",
|
"@tauri-apps/api": "2.0.0-beta.12",
|
||||||
@ -42,7 +41,7 @@
|
|||||||
"fuse.js": "^7.0.0",
|
"fuse.js": "^7.0.0",
|
||||||
"html2canvas-pro": "^1.4.3",
|
"html2canvas-pro": "^1.4.3",
|
||||||
"http-server": "^14.1.1",
|
"http-server": "^14.1.1",
|
||||||
"json-rpc-2.0": "^1.6.0",
|
"json-rpc-2.0": "^1.7.0",
|
||||||
"jszip": "^3.10.1",
|
"jszip": "^3.10.1",
|
||||||
"node-fetch": "^3.3.2",
|
"node-fetch": "^3.3.2",
|
||||||
"re-resizable": "^6.9.11",
|
"re-resizable": "^6.9.11",
|
||||||
@ -62,7 +61,8 @@
|
|||||||
"ua-parser-js": "^1.0.37",
|
"ua-parser-js": "^1.0.37",
|
||||||
"uuid": "^9.0.1",
|
"uuid": "^9.0.1",
|
||||||
"vitest": "^1.6.0",
|
"vitest": "^1.6.0",
|
||||||
"vscode-jsonrpc": "^8.2.1",
|
"vscode-languageclient": "^9.0.1",
|
||||||
|
"vscode-languageserver": "^9.0.1",
|
||||||
"vscode-languageserver-protocol": "^3.17.5",
|
"vscode-languageserver-protocol": "^3.17.5",
|
||||||
"wasm-pack": "^0.12.1",
|
"wasm-pack": "^0.12.1",
|
||||||
"web-vitals": "^3.5.2",
|
"web-vitals": "^3.5.2",
|
||||||
@ -89,7 +89,7 @@
|
|||||||
"fmt-check": "prettier --check ./src *.ts *.json *.js ./e2e",
|
"fmt-check": "prettier --check ./src *.ts *.json *.js ./e2e",
|
||||||
"fetch:wasm": "./get-latest-wasm-bundle.sh",
|
"fetch:wasm": "./get-latest-wasm-bundle.sh",
|
||||||
"build:wasm-dev": "(cd src/wasm-lib && wasm-pack build --dev --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
"build:wasm-dev": "(cd src/wasm-lib && wasm-pack build --dev --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
||||||
"build:wasm": "(cd src/wasm-lib && wasm-pack build --release --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
"build:wasm": "(cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
||||||
"build:wasm-clean": "yarn wasm-prep && yarn build:wasm",
|
"build:wasm-clean": "yarn wasm-prep && yarn build:wasm",
|
||||||
"remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"",
|
"remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"",
|
||||||
"wasm-prep": "rm -rf src/wasm-lib/pkg && mkdir src/wasm-lib/pkg && rm -rf src/wasm-lib/kcl/bindings",
|
"wasm-prep": "rm -rf src/wasm-lib/pkg && mkdir src/wasm-lib/pkg && rm -rf src/wasm-lib/kcl/bindings",
|
||||||
|
4
src-tauri/Cargo.lock
generated
4
src-tauri/Cargo.lock
generated
@ -4546,9 +4546,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.118"
|
version = "1.0.116"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4"
|
checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"indexmap 2.2.6",
|
"indexmap 2.2.6",
|
||||||
"itoa 1.0.11",
|
"itoa 1.0.11",
|
||||||
|
@ -25,7 +25,6 @@ import { LowerRightControls } from 'components/LowerRightControls'
|
|||||||
import ModalContainer from 'react-modal-promise'
|
import ModalContainer from 'react-modal-promise'
|
||||||
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
||||||
import Gizmo from 'components/Gizmo'
|
import Gizmo from 'components/Gizmo'
|
||||||
import { CoreDumpManager } from 'lib/coredump'
|
|
||||||
|
|
||||||
export function App() {
|
export function App() {
|
||||||
useRefreshSettings(paths.FILE + 'SETTINGS')
|
useRefreshSettings(paths.FILE + 'SETTINGS')
|
||||||
@ -56,11 +55,7 @@ export function App() {
|
|||||||
setHtmlRef(ref)
|
setHtmlRef(ref)
|
||||||
}, [ref])
|
}, [ref])
|
||||||
|
|
||||||
const { auth, settings } = useSettingsAuthContext()
|
const { settings } = useSettingsAuthContext()
|
||||||
const token = auth?.context?.token
|
|
||||||
|
|
||||||
const coreDumpManager = new CoreDumpManager(engineCommandManager, ref, token)
|
|
||||||
|
|
||||||
const {
|
const {
|
||||||
app: { onboardingStatus },
|
app: { onboardingStatus },
|
||||||
} = settings.context
|
} = settings.context
|
||||||
@ -134,7 +129,7 @@ export function App() {
|
|||||||
<ModelingSidebar paneOpacity={paneOpacity} />
|
<ModelingSidebar paneOpacity={paneOpacity} />
|
||||||
<Stream />
|
<Stream />
|
||||||
{/* <CamToggle /> */}
|
{/* <CamToggle /> */}
|
||||||
<LowerRightControls coreDumpManager={coreDumpManager}>
|
<LowerRightControls>
|
||||||
<Gizmo />
|
<Gizmo />
|
||||||
</LowerRightControls>
|
</LowerRightControls>
|
||||||
</div>
|
</div>
|
||||||
|
@ -534,7 +534,7 @@ export class SceneEntities {
|
|||||||
segmentName: 'line' | 'tangentialArcTo' = 'line',
|
segmentName: 'line' | 'tangentialArcTo' = 'line',
|
||||||
shouldTearDown = true
|
shouldTearDown = true
|
||||||
) => {
|
) => {
|
||||||
const _ast = kclManager.ast
|
const _ast = JSON.parse(JSON.stringify(kclManager.ast))
|
||||||
|
|
||||||
const _node1 = getNodeFromPath<VariableDeclaration>(
|
const _node1 = getNodeFromPath<VariableDeclaration>(
|
||||||
_ast,
|
_ast,
|
||||||
@ -692,7 +692,7 @@ export class SceneEntities {
|
|||||||
sketchOrigin: [number, number, number],
|
sketchOrigin: [number, number, number],
|
||||||
rectangleOrigin: [x: number, y: number]
|
rectangleOrigin: [x: number, y: number]
|
||||||
) => {
|
) => {
|
||||||
let _ast = kclManager.ast
|
let _ast = JSON.parse(JSON.stringify(kclManager.ast))
|
||||||
|
|
||||||
const _node1 = getNodeFromPath<VariableDeclaration>(
|
const _node1 = getNodeFromPath<VariableDeclaration>(
|
||||||
_ast,
|
_ast,
|
||||||
@ -723,9 +723,7 @@ export class SceneEntities {
|
|||||||
...getRectangleCallExpressions(rectangleOrigin, tags),
|
...getRectangleCallExpressions(rectangleOrigin, tags),
|
||||||
])
|
])
|
||||||
|
|
||||||
let result = parse(recast(_ast))
|
_ast = parse(recast(_ast))
|
||||||
if (trap(result)) return Promise.reject(result)
|
|
||||||
_ast = result
|
|
||||||
|
|
||||||
const { programMemoryOverride, truncatedAst } = await this.setupSketch({
|
const { programMemoryOverride, truncatedAst } = await this.setupSketch({
|
||||||
sketchPathToNode,
|
sketchPathToNode,
|
||||||
@ -739,7 +737,7 @@ export class SceneEntities {
|
|||||||
sceneInfra.setCallbacks({
|
sceneInfra.setCallbacks({
|
||||||
onMove: async (args) => {
|
onMove: async (args) => {
|
||||||
// Update the width and height of the draft rectangle
|
// Update the width and height of the draft rectangle
|
||||||
const pathToNodeTwo = sketchPathToNode
|
const pathToNodeTwo = JSON.parse(JSON.stringify(sketchPathToNode))
|
||||||
pathToNodeTwo[1][0] = 0
|
pathToNodeTwo[1][0] = 0
|
||||||
|
|
||||||
const _node = getNodeFromPath<VariableDeclaration>(
|
const _node = getNodeFromPath<VariableDeclaration>(
|
||||||
@ -801,9 +799,7 @@ export class SceneEntities {
|
|||||||
if (sketchInit.type === 'PipeExpression') {
|
if (sketchInit.type === 'PipeExpression') {
|
||||||
updateRectangleSketch(sketchInit, x, y, tags[0])
|
updateRectangleSketch(sketchInit, x, y, tags[0])
|
||||||
|
|
||||||
let result = parse(recast(_ast))
|
_ast = parse(recast(_ast))
|
||||||
if (trap(result)) return Promise.reject(result)
|
|
||||||
_ast = result
|
|
||||||
|
|
||||||
// Update the primary AST and unequip the rectangle tool
|
// Update the primary AST and unequip the rectangle tool
|
||||||
await kclManager.executeAstMock(_ast)
|
await kclManager.executeAstMock(_ast)
|
||||||
@ -1007,8 +1003,10 @@ export class SceneEntities {
|
|||||||
PROFILE_START,
|
PROFILE_START,
|
||||||
])
|
])
|
||||||
if (!group) return
|
if (!group) return
|
||||||
const pathToNode: PathToNode = group.userData.pathToNode
|
const pathToNode: PathToNode = JSON.parse(
|
||||||
const varDecIndex: number = pathToNode[1][0] as number
|
JSON.stringify(group.userData.pathToNode)
|
||||||
|
)
|
||||||
|
const varDecIndex = JSON.parse(JSON.stringify(pathToNode[1][0]))
|
||||||
if (draftInfo) {
|
if (draftInfo) {
|
||||||
pathToNode[1][0] = 0
|
pathToNode[1][0] = 0
|
||||||
}
|
}
|
||||||
@ -1721,7 +1719,7 @@ function prepareTruncatedMemoryAndAst(
|
|||||||
}
|
}
|
||||||
| Error {
|
| Error {
|
||||||
const bodyIndex = Number(sketchPathToNode?.[1]?.[0]) || 0
|
const bodyIndex = Number(sketchPathToNode?.[1]?.[0]) || 0
|
||||||
const _ast = ast
|
const _ast = JSON.parse(JSON.stringify(ast))
|
||||||
|
|
||||||
const _node = getNodeFromPath<VariableDeclaration>(
|
const _node = getNodeFromPath<VariableDeclaration>(
|
||||||
_ast,
|
_ast,
|
||||||
@ -1780,7 +1778,7 @@ function prepareTruncatedMemoryAndAst(
|
|||||||
}
|
}
|
||||||
const truncatedAst: Program = {
|
const truncatedAst: Program = {
|
||||||
..._ast,
|
..._ast,
|
||||||
body: [_ast.body[bodyIndex]],
|
body: [JSON.parse(JSON.stringify(_ast.body[bodyIndex]))],
|
||||||
}
|
}
|
||||||
const programMemoryOverride = programMemoryInit()
|
const programMemoryOverride = programMemoryInit()
|
||||||
if (err(programMemoryOverride)) return programMemoryOverride
|
if (err(programMemoryOverride)) return programMemoryOverride
|
||||||
@ -1806,7 +1804,7 @@ function prepareTruncatedMemoryAndAst(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (value.type === 'TagIdentifier') {
|
if (value.type === 'TagIdentifier') {
|
||||||
programMemoryOverride.root[key] = value
|
programMemoryOverride.root[key] = JSON.parse(JSON.stringify(value))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1821,7 +1819,7 @@ function prepareTruncatedMemoryAndAst(
|
|||||||
if (!memoryItem) {
|
if (!memoryItem) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
programMemoryOverride.root[name] = memoryItem
|
programMemoryOverride.root[name] = JSON.parse(JSON.stringify(memoryItem))
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
truncatedAst,
|
truncatedAst,
|
||||||
@ -1969,9 +1967,9 @@ export async function getSketchOrientationDetails(
|
|||||||
* @param entityId - The ID of the entity for which orientation details are being fetched.
|
* @param entityId - The ID of the entity for which orientation details are being fetched.
|
||||||
* @returns A promise that resolves with the orientation details of the face.
|
* @returns A promise that resolves with the orientation details of the face.
|
||||||
*/
|
*/
|
||||||
export async function getFaceDetails(
|
async function getFaceDetails(
|
||||||
entityId: string
|
entityId: string
|
||||||
): Promise<Models['GetSketchModePlane_type']> {
|
): Promise<Models['FaceIsPlanar_type']> {
|
||||||
// TODO mode engine connection to allow batching returns and batch the following
|
// TODO mode engine connection to allow batching returns and batch the following
|
||||||
await engineCommandManager.sendSceneCommand({
|
await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
@ -1984,7 +1982,8 @@ export async function getFaceDetails(
|
|||||||
entity_id: entityId,
|
entity_id: entityId,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
const faceInfo: Models['GetSketchModePlane_type'] = (
|
// TODO change typing to get_sketch_mode_plane once lib is updated
|
||||||
|
const faceInfo: Models['FaceIsPlanar_type'] = (
|
||||||
await engineCommandManager.sendSceneCommand({
|
await engineCommandManager.sendSceneCommand({
|
||||||
type: 'modeling_cmd_req',
|
type: 'modeling_cmd_req',
|
||||||
cmd_id: uuidv4(),
|
cmd_id: uuidv4(),
|
||||||
|
@ -151,7 +151,9 @@ export function useCalc({
|
|||||||
ast,
|
ast,
|
||||||
engineCommandManager,
|
engineCommandManager,
|
||||||
useFakeExecutor: true,
|
useFakeExecutor: true,
|
||||||
programMemoryOverride: kclManager.programMemory,
|
programMemoryOverride: JSON.parse(
|
||||||
|
JSON.stringify(kclManager.programMemory)
|
||||||
|
),
|
||||||
}).then(({ programMemory }) => {
|
}).then(({ programMemory }) => {
|
||||||
const resultDeclaration = ast.body.find(
|
const resultDeclaration = ast.body.find(
|
||||||
(a) =>
|
(a) =>
|
||||||
|
@ -6,18 +6,8 @@ import { NetworkHealthIndicator } from 'components/NetworkHealthIndicator'
|
|||||||
import { HelpMenu } from './HelpMenu'
|
import { HelpMenu } from './HelpMenu'
|
||||||
import { Link, useLocation } from 'react-router-dom'
|
import { Link, useLocation } from 'react-router-dom'
|
||||||
import { useAbsoluteFilePath } from 'hooks/useAbsoluteFilePath'
|
import { useAbsoluteFilePath } from 'hooks/useAbsoluteFilePath'
|
||||||
import { coreDump } from 'lang/wasm'
|
|
||||||
import toast from 'react-hot-toast'
|
|
||||||
import { CoreDumpManager } from 'lib/coredump'
|
|
||||||
import openWindow from 'lib/openWindow'
|
|
||||||
|
|
||||||
export function LowerRightControls({
|
export function LowerRightControls(props: React.PropsWithChildren) {
|
||||||
children,
|
|
||||||
coreDumpManager,
|
|
||||||
}: {
|
|
||||||
children?: React.ReactNode
|
|
||||||
coreDumpManager?: CoreDumpManager
|
|
||||||
}) {
|
|
||||||
const location = useLocation()
|
const location = useLocation()
|
||||||
const filePath = useAbsoluteFilePath()
|
const filePath = useAbsoluteFilePath()
|
||||||
const linkOverrideClassName =
|
const linkOverrideClassName =
|
||||||
@ -25,42 +15,9 @@ export function LowerRightControls({
|
|||||||
|
|
||||||
const isPlayWright = window?.localStorage.getItem('playwright') === 'true'
|
const isPlayWright = window?.localStorage.getItem('playwright') === 'true'
|
||||||
|
|
||||||
async function reportbug(event: { preventDefault: () => void }) {
|
|
||||||
event?.preventDefault()
|
|
||||||
|
|
||||||
if (!coreDumpManager) {
|
|
||||||
// open default reporting option
|
|
||||||
openWindow('https://github.com/KittyCAD/modeling-app/issues/new/choose')
|
|
||||||
} else {
|
|
||||||
toast
|
|
||||||
.promise(
|
|
||||||
coreDump(coreDumpManager, true),
|
|
||||||
{
|
|
||||||
loading: 'Preparing bug report...',
|
|
||||||
success: 'Bug report opened in new window',
|
|
||||||
error: 'Unable to export a core dump. Using default reporting.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
success: {
|
|
||||||
// Note: this extended duration is especially important for Playwright e2e testing
|
|
||||||
// default duration is 2000 - https://react-hot-toast.com/docs/toast#default-durations
|
|
||||||
duration: 6000,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.catch((err: Error) => {
|
|
||||||
if (err) {
|
|
||||||
openWindow(
|
|
||||||
'https://github.com/KittyCAD/modeling-app/issues/new/choose'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<section className="fixed bottom-2 right-2 flex flex-col items-end gap-3 pointer-events-none">
|
<section className="fixed bottom-2 right-2 flex flex-col items-end gap-3 pointer-events-none">
|
||||||
{children}
|
{props.children}
|
||||||
<menu className="flex items-center justify-end gap-3 pointer-events-auto">
|
<menu className="flex items-center justify-end gap-3 pointer-events-auto">
|
||||||
<a
|
<a
|
||||||
href={`https://github.com/KittyCAD/modeling-app/releases/tag/v${APP_VERSION}`}
|
href={`https://github.com/KittyCAD/modeling-app/releases/tag/v${APP_VERSION}`}
|
||||||
@ -71,7 +28,6 @@ export function LowerRightControls({
|
|||||||
v{isPlayWright ? '11.22.33' : APP_VERSION}
|
v{isPlayWright ? '11.22.33' : APP_VERSION}
|
||||||
</a>
|
</a>
|
||||||
<a
|
<a
|
||||||
onClick={reportbug}
|
|
||||||
href="https://github.com/KittyCAD/modeling-app/issues/new/choose"
|
href="https://github.com/KittyCAD/modeling-app/issues/new/choose"
|
||||||
target="_blank"
|
target="_blank"
|
||||||
rel="noopener noreferrer"
|
rel="noopener noreferrer"
|
||||||
|
@ -7,8 +7,7 @@ import React, {
|
|||||||
useContext,
|
useContext,
|
||||||
useState,
|
useState,
|
||||||
} from 'react'
|
} from 'react'
|
||||||
import { FromServer, IntoServer } from 'editor/plugins/lsp/codec'
|
import LspServerClient from '../editor/plugins/lsp/client'
|
||||||
import Client from '../editor/plugins/lsp/client'
|
|
||||||
import { TEST, VITE_KC_API_BASE_URL } from 'env'
|
import { TEST, VITE_KC_API_BASE_URL } from 'env'
|
||||||
import kclLanguage from 'editor/plugins/lsp/kcl/language'
|
import kclLanguage from 'editor/plugins/lsp/kcl/language'
|
||||||
import { copilotPlugin } from 'editor/plugins/lsp/copilot'
|
import { copilotPlugin } from 'editor/plugins/lsp/copilot'
|
||||||
@ -19,9 +18,7 @@ import { LanguageSupport } from '@codemirror/language'
|
|||||||
import { useNavigate } from 'react-router-dom'
|
import { useNavigate } from 'react-router-dom'
|
||||||
import { paths } from 'lib/paths'
|
import { paths } from 'lib/paths'
|
||||||
import { FileEntry } from 'lib/types'
|
import { FileEntry } from 'lib/types'
|
||||||
import Worker from 'editor/plugins/lsp/worker.ts?worker'
|
|
||||||
import {
|
import {
|
||||||
LspWorkerEventType,
|
|
||||||
KclWorkerOptions,
|
KclWorkerOptions,
|
||||||
CopilotWorkerOptions,
|
CopilotWorkerOptions,
|
||||||
LspWorker,
|
LspWorker,
|
||||||
@ -30,7 +27,6 @@ import { wasmUrl } from 'lang/wasm'
|
|||||||
import { PROJECT_ENTRYPOINT } from 'lib/constants'
|
import { PROJECT_ENTRYPOINT } from 'lib/constants'
|
||||||
import { useNetworkContext } from 'hooks/useNetworkContext'
|
import { useNetworkContext } from 'hooks/useNetworkContext'
|
||||||
import { NetworkHealthState } from 'hooks/useNetworkStatus'
|
import { NetworkHealthState } from 'hooks/useNetworkStatus'
|
||||||
import { err } from 'lib/trap'
|
|
||||||
|
|
||||||
function getWorkspaceFolders(): LSP.WorkspaceFolder[] {
|
function getWorkspaceFolders(): LSP.WorkspaceFolder[] {
|
||||||
return []
|
return []
|
||||||
@ -107,32 +103,23 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
|
|||||||
return { lspClient: null }
|
return { lspClient: null }
|
||||||
}
|
}
|
||||||
|
|
||||||
const lspWorker = new Worker({ name: 'kcl' })
|
const options: KclWorkerOptions = {
|
||||||
const initEvent: KclWorkerOptions = {
|
|
||||||
wasmUrl: wasmUrl(),
|
|
||||||
token: token,
|
token: token,
|
||||||
baseUnit: defaultUnit.current,
|
baseUnit: defaultUnit.current,
|
||||||
apiBaseUrl: VITE_KC_API_BASE_URL,
|
apiBaseUrl: VITE_KC_API_BASE_URL,
|
||||||
|
callback: () => {
|
||||||
|
setIsLspReady(true)
|
||||||
|
},
|
||||||
|
wasmUrl: wasmUrl(),
|
||||||
}
|
}
|
||||||
lspWorker.postMessage({
|
|
||||||
worker: LspWorker.Kcl,
|
const lsp = new LspServerClient({ worker: LspWorker.Kcl, options })
|
||||||
eventType: LspWorkerEventType.Init,
|
lsp.startServer()
|
||||||
eventData: initEvent,
|
|
||||||
|
const lspClient = new LanguageServerClient({
|
||||||
|
client: lsp,
|
||||||
|
name: LspWorker.Kcl,
|
||||||
})
|
})
|
||||||
lspWorker.onmessage = function (e) {
|
|
||||||
if (err(fromServer)) return
|
|
||||||
fromServer.add(e.data)
|
|
||||||
}
|
|
||||||
|
|
||||||
const intoServer: IntoServer = new IntoServer(LspWorker.Kcl, lspWorker)
|
|
||||||
const fromServer: FromServer | Error = FromServer.create()
|
|
||||||
if (err(fromServer)) return { lspClient: null }
|
|
||||||
|
|
||||||
const client = new Client(fromServer, intoServer)
|
|
||||||
|
|
||||||
setIsLspReady(true)
|
|
||||||
|
|
||||||
const lspClient = new LanguageServerClient({ client, name: LspWorker.Kcl })
|
|
||||||
return { lspClient }
|
return { lspClient }
|
||||||
}, [
|
}, [
|
||||||
// We need a token for authenticating the server.
|
// We need a token for authenticating the server.
|
||||||
@ -185,32 +172,19 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
|
|||||||
return { lspClient: null }
|
return { lspClient: null }
|
||||||
}
|
}
|
||||||
|
|
||||||
const lspWorker = new Worker({ name: 'copilot' })
|
const options: CopilotWorkerOptions = {
|
||||||
const initEvent: CopilotWorkerOptions = {
|
|
||||||
wasmUrl: wasmUrl(),
|
|
||||||
token: token,
|
token: token,
|
||||||
apiBaseUrl: VITE_KC_API_BASE_URL,
|
apiBaseUrl: VITE_KC_API_BASE_URL,
|
||||||
|
callback: () => {
|
||||||
|
setIsCopilotReady(true)
|
||||||
|
},
|
||||||
|
wasmUrl: wasmUrl(),
|
||||||
}
|
}
|
||||||
lspWorker.postMessage({
|
const lsp = new LspServerClient({ worker: LspWorker.Copilot, options })
|
||||||
worker: LspWorker.Copilot,
|
lsp.startServer()
|
||||||
eventType: LspWorkerEventType.Init,
|
|
||||||
eventData: initEvent,
|
|
||||||
})
|
|
||||||
lspWorker.onmessage = function (e) {
|
|
||||||
if (err(fromServer)) return
|
|
||||||
fromServer.add(e.data)
|
|
||||||
}
|
|
||||||
|
|
||||||
const intoServer: IntoServer = new IntoServer(LspWorker.Copilot, lspWorker)
|
|
||||||
const fromServer: FromServer | Error = FromServer.create()
|
|
||||||
if (err(fromServer)) return { lspClient: null }
|
|
||||||
|
|
||||||
const client = new Client(fromServer, intoServer)
|
|
||||||
|
|
||||||
setIsCopilotReady(true)
|
|
||||||
|
|
||||||
const lspClient = new LanguageServerClient({
|
const lspClient = new LanguageServerClient({
|
||||||
client,
|
client: lsp,
|
||||||
name: LspWorker.Copilot,
|
name: LspWorker.Copilot,
|
||||||
})
|
})
|
||||||
return { lspClient }
|
return { lspClient }
|
||||||
|
@ -23,7 +23,6 @@ import {
|
|||||||
editorManager,
|
editorManager,
|
||||||
sceneEntitiesManager,
|
sceneEntitiesManager,
|
||||||
} from 'lib/singletons'
|
} from 'lib/singletons'
|
||||||
import { useHotkeys } from 'react-hotkeys-hook'
|
|
||||||
import { applyConstraintHorzVertDistance } from './Toolbar/SetHorzVertDistance'
|
import { applyConstraintHorzVertDistance } from './Toolbar/SetHorzVertDistance'
|
||||||
import {
|
import {
|
||||||
angleBetweenInfo,
|
angleBetweenInfo,
|
||||||
@ -79,7 +78,6 @@ import { getVarNameModal } from 'hooks/useToolbarGuards'
|
|||||||
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
||||||
import { uuidv4 } from 'lib/utils'
|
import { uuidv4 } from 'lib/utils'
|
||||||
import { err, trap } from 'lib/trap'
|
import { err, trap } from 'lib/trap'
|
||||||
import { useCommandsContext } from 'hooks/useCommandsContext'
|
|
||||||
|
|
||||||
type MachineContext<T extends AnyStateMachine> = {
|
type MachineContext<T extends AnyStateMachine> = {
|
||||||
state: StateFrom<T>
|
state: StateFrom<T>
|
||||||
@ -126,6 +124,7 @@ export const ModelingMachineProvider = ({
|
|||||||
token
|
token
|
||||||
)
|
)
|
||||||
useHotkeyWrapper(['meta + shift + .'], () => {
|
useHotkeyWrapper(['meta + shift + .'], () => {
|
||||||
|
console.warn('CoreDump: Initializing core dump')
|
||||||
toast.promise(
|
toast.promise(
|
||||||
coreDump(coreDumpManager, true),
|
coreDump(coreDumpManager, true),
|
||||||
{
|
{
|
||||||
@ -142,7 +141,6 @@ export const ModelingMachineProvider = ({
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
const { commandBarState } = useCommandsContext()
|
|
||||||
|
|
||||||
// Settings machine setup
|
// Settings machine setup
|
||||||
// const retrievedSettings = useRef(
|
// const retrievedSettings = useRef(
|
||||||
@ -328,11 +326,6 @@ export const ModelingMachineProvider = ({
|
|||||||
)
|
)
|
||||||
updateSceneObjectColors()
|
updateSceneObjectColors()
|
||||||
|
|
||||||
// side effect to stop code mirror from updating the same selections again
|
|
||||||
editorManager.lastSelection = selections.codeBasedSelections
|
|
||||||
.map(({ range }) => `${range[1]}->${range[1]}`)
|
|
||||||
.join('&')
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
selectionRanges: selections,
|
selectionRanges: selections,
|
||||||
}
|
}
|
||||||
@ -467,11 +460,6 @@ export const ModelingMachineProvider = ({
|
|||||||
|
|
||||||
return canExtrudeSelection(selectionRanges)
|
return canExtrudeSelection(selectionRanges)
|
||||||
},
|
},
|
||||||
'has valid selection for deletion': ({ selectionRanges }) => {
|
|
||||||
if (!commandBarState.matches('Closed')) return false
|
|
||||||
if (selectionRanges.codeBasedSelections.length <= 0) return false
|
|
||||||
return true
|
|
||||||
},
|
|
||||||
'Sketch is empty': ({ sketchDetails }) => {
|
'Sketch is empty': ({ sketchDetails }) => {
|
||||||
const node = getNodeFromPath<VariableDeclaration>(
|
const node = getNodeFromPath<VariableDeclaration>(
|
||||||
kclManager.ast,
|
kclManager.ast,
|
||||||
@ -513,7 +501,7 @@ export const ModelingMachineProvider = ({
|
|||||||
services: {
|
services: {
|
||||||
'AST-undo-startSketchOn': async ({ sketchDetails }) => {
|
'AST-undo-startSketchOn': async ({ sketchDetails }) => {
|
||||||
if (!sketchDetails) return
|
if (!sketchDetails) return
|
||||||
const newAst: Program = kclManager.ast
|
const newAst: Program = JSON.parse(JSON.stringify(kclManager.ast))
|
||||||
const varDecIndex = sketchDetails.sketchPathToNode[1][0]
|
const varDecIndex = sketchDetails.sketchPathToNode[1][0]
|
||||||
// remove body item at varDecIndex
|
// remove body item at varDecIndex
|
||||||
newAst.body = newAst.body.filter((_, i) => i !== varDecIndex)
|
newAst.body = newAst.body.filter((_, i) => i !== varDecIndex)
|
||||||
@ -935,11 +923,6 @@ export const ModelingMachineProvider = ({
|
|||||||
}
|
}
|
||||||
}, [modelingSend])
|
}, [modelingSend])
|
||||||
|
|
||||||
// Allow using the delete key to delete solids
|
|
||||||
useHotkeys(['backspace', 'delete', 'del'], () => {
|
|
||||||
modelingSend({ type: 'Delete selection' })
|
|
||||||
})
|
|
||||||
|
|
||||||
useStateMachineCommands({
|
useStateMachineCommands({
|
||||||
machineId: 'modeling',
|
machineId: 'modeling',
|
||||||
state: modelingState,
|
state: modelingState,
|
||||||
|
@ -1,25 +1,7 @@
|
|||||||
import { coreDump } from 'lang/wasm'
|
|
||||||
import { CoreDumpManager } from 'lib/coredump'
|
|
||||||
import { CustomIcon } from './CustomIcon'
|
import { CustomIcon } from './CustomIcon'
|
||||||
import { engineCommandManager } from 'lib/singletons'
|
|
||||||
import React from 'react'
|
|
||||||
import toast from 'react-hot-toast'
|
|
||||||
import Tooltip from './Tooltip'
|
import Tooltip from './Tooltip'
|
||||||
import { useStore } from 'useStore'
|
|
||||||
import { useSettingsAuthContext } from 'hooks/useSettingsAuthContext'
|
|
||||||
|
|
||||||
export const RefreshButton = ({ children }: React.PropsWithChildren) => {
|
|
||||||
const { auth } = useSettingsAuthContext()
|
|
||||||
const token = auth?.context?.token
|
|
||||||
const { htmlRef } = useStore((s) => ({
|
|
||||||
htmlRef: s.htmlRef,
|
|
||||||
}))
|
|
||||||
const coreDumpManager = new CoreDumpManager(
|
|
||||||
engineCommandManager,
|
|
||||||
htmlRef,
|
|
||||||
token
|
|
||||||
)
|
|
||||||
|
|
||||||
|
export function RefreshButton() {
|
||||||
async function refresh() {
|
async function refresh() {
|
||||||
if (window && 'plausible' in window) {
|
if (window && 'plausible' in window) {
|
||||||
const p = window.plausible as (
|
const p = window.plausible as (
|
||||||
@ -35,26 +17,8 @@ export const RefreshButton = ({ children }: React.PropsWithChildren) => {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
toast
|
// Window may not be available in some environments
|
||||||
.promise(
|
window?.location.reload()
|
||||||
coreDump(coreDumpManager, true),
|
|
||||||
{
|
|
||||||
loading: 'Starting core dump...',
|
|
||||||
success: 'Core dump completed successfully',
|
|
||||||
error: 'Error while exporting core dump',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
success: {
|
|
||||||
// Note: this extended duration is especially important for Playwright e2e testing
|
|
||||||
// default duration is 2000 - https://react-hot-toast.com/docs/toast#default-durations
|
|
||||||
duration: 6000,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
|
||||||
.then(() => {
|
|
||||||
// Window may not be available in some environments
|
|
||||||
window?.location.reload()
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
@ -83,7 +83,6 @@ export const Stream = ({ className = '' }: { className?: string }) => {
|
|||||||
if (!videoRef.current) return
|
if (!videoRef.current) return
|
||||||
if (state.matches('Sketch')) return
|
if (state.matches('Sketch')) return
|
||||||
if (state.matches('Sketch no face')) return
|
if (state.matches('Sketch no face')) return
|
||||||
|
|
||||||
const { x, y } = getNormalisedCoordinates({
|
const { x, y } = getNormalisedCoordinates({
|
||||||
clientX: e.clientX,
|
clientX: e.clientX,
|
||||||
clientY: e.clientY,
|
clientY: e.clientY,
|
||||||
|
@ -145,7 +145,7 @@ export async function applyConstraintIntersect({
|
|||||||
const { transforms, forcedSelectionRanges } = info
|
const { transforms, forcedSelectionRanges } = info
|
||||||
|
|
||||||
const transform1 = transformSecondarySketchLinesTagFirst({
|
const transform1 = transformSecondarySketchLinesTagFirst({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges: forcedSelectionRanges,
|
selectionRanges: forcedSelectionRanges,
|
||||||
transformInfos: transforms,
|
transformInfos: transforms,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
|
@ -106,7 +106,7 @@ export async function applyConstraintAbsDistance({
|
|||||||
const transformInfos = info.transforms
|
const transformInfos = info.transforms
|
||||||
|
|
||||||
const transform1 = transformAstSketchLines({
|
const transform1 = transformAstSketchLines({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges: selectionRanges,
|
selectionRanges: selectionRanges,
|
||||||
transformInfos,
|
transformInfos,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
@ -128,7 +128,7 @@ export async function applyConstraintAbsDistance({
|
|||||||
)
|
)
|
||||||
|
|
||||||
const transform2 = transformAstSketchLines({
|
const transform2 = transformAstSketchLines({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges: selectionRanges,
|
selectionRanges: selectionRanges,
|
||||||
transformInfos,
|
transformInfos,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
@ -176,7 +176,7 @@ export function applyConstraintAxisAlign({
|
|||||||
let finalValue = createIdentifier('ZERO')
|
let finalValue = createIdentifier('ZERO')
|
||||||
|
|
||||||
return transformAstSketchLines({
|
return transformAstSketchLines({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges: selectionRanges,
|
selectionRanges: selectionRanges,
|
||||||
transformInfos,
|
transformInfos,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
|
@ -100,7 +100,7 @@ export async function applyConstraintAngleBetween({
|
|||||||
const transformInfos = info.transforms
|
const transformInfos = info.transforms
|
||||||
|
|
||||||
const transformed1 = transformSecondarySketchLinesTagFirst({
|
const transformed1 = transformSecondarySketchLinesTagFirst({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges,
|
selectionRanges,
|
||||||
transformInfos,
|
transformInfos,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
|
@ -108,7 +108,7 @@ export async function applyConstraintHorzVertDistance({
|
|||||||
if (err(info)) return Promise.reject(info)
|
if (err(info)) return Promise.reject(info)
|
||||||
const transformInfos = info.transforms
|
const transformInfos = info.transforms
|
||||||
const transformed = transformSecondarySketchLinesTagFirst({
|
const transformed = transformSecondarySketchLinesTagFirst({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges,
|
selectionRanges,
|
||||||
transformInfos,
|
transformInfos,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
|
@ -84,7 +84,7 @@ export async function applyConstraintAngleLength({
|
|||||||
|
|
||||||
const { transforms } = angleLength
|
const { transforms } = angleLength
|
||||||
const sketched = transformAstSketchLines({
|
const sketched = transformAstSketchLines({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges,
|
selectionRanges,
|
||||||
transformInfos: transforms,
|
transformInfos: transforms,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
@ -139,7 +139,7 @@ export async function applyConstraintAngleLength({
|
|||||||
}
|
}
|
||||||
|
|
||||||
const retval = transformAstSketchLines({
|
const retval = transformAstSketchLines({
|
||||||
ast: kclManager.ast,
|
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||||
selectionRanges,
|
selectionRanges,
|
||||||
transformInfos: transforms,
|
transformInfos: transforms,
|
||||||
programMemory: kclManager.programMemory,
|
programMemory: kclManager.programMemory,
|
||||||
|
@ -23,7 +23,7 @@ export default class EditorManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private _lastSelectionEvent: number | null = null
|
private _lastSelectionEvent: number | null = null
|
||||||
lastSelection: string = ''
|
private _lastSelection: string = ''
|
||||||
private _lastEvent: { event: string; time: number } | null = null
|
private _lastEvent: { event: string; time: number } | null = null
|
||||||
|
|
||||||
private _modelingSend: (eventInfo: ModelingMachineEvent) => void = () => {}
|
private _modelingSend: (eventInfo: ModelingMachineEvent) => void = () => {}
|
||||||
@ -199,14 +199,12 @@ export default class EditorManager {
|
|||||||
viewUpdate?.state?.selection?.ranges || []
|
viewUpdate?.state?.selection?.ranges || []
|
||||||
)
|
)
|
||||||
|
|
||||||
if (selString === this.lastSelection) {
|
if (selString === this._lastSelection) {
|
||||||
// onUpdate is noisy and is fired a lot by extensions
|
// onUpdate is noisy and is fired a lot by extensions
|
||||||
// since we're only interested in selections changes we can ignore most of these.
|
// since we're only interested in selections changes we can ignore most of these.
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
// note this is also set from the "Set selection" action to stop code mirror from updating selections right after
|
this._lastSelection = selString
|
||||||
// selections are made from the scene
|
|
||||||
this.lastSelection = selString
|
|
||||||
|
|
||||||
if (
|
if (
|
||||||
this._lastSelectionEvent &&
|
this._lastSelectionEvent &&
|
||||||
|
@ -1,197 +1,54 @@
|
|||||||
import * as jsrpc from 'json-rpc-2.0'
|
import { LspContext, LspWorkerEventType } from './types'
|
||||||
import * as LSP from 'vscode-languageserver-protocol'
|
|
||||||
|
|
||||||
import {
|
import {
|
||||||
registerServerCapability,
|
LanguageClient,
|
||||||
unregisterServerCapability,
|
LanguageClientOptions,
|
||||||
} from './server-capability-registration'
|
} from 'vscode-languageclient/browser'
|
||||||
import { Codec, FromServer, IntoServer } from './codec'
|
import Worker from 'editor/plugins/lsp/worker.ts?worker'
|
||||||
import { err } from 'lib/trap'
|
|
||||||
|
|
||||||
const client_capabilities: LSP.ClientCapabilities = {
|
export default class LspServerClient {
|
||||||
textDocument: {
|
context: LspContext
|
||||||
hover: {
|
client: LanguageClient | null = null
|
||||||
dynamicRegistration: true,
|
worker: Worker | null = null
|
||||||
contentFormat: ['plaintext', 'markdown'],
|
|
||||||
},
|
|
||||||
moniker: {},
|
|
||||||
synchronization: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
willSave: false,
|
|
||||||
didSave: false,
|
|
||||||
willSaveWaitUntil: false,
|
|
||||||
},
|
|
||||||
completion: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
completionItem: {
|
|
||||||
snippetSupport: false,
|
|
||||||
commitCharactersSupport: true,
|
|
||||||
documentationFormat: ['plaintext', 'markdown'],
|
|
||||||
deprecatedSupport: false,
|
|
||||||
preselectSupport: false,
|
|
||||||
},
|
|
||||||
contextSupport: false,
|
|
||||||
},
|
|
||||||
signatureHelp: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
signatureInformation: {
|
|
||||||
documentationFormat: ['plaintext', 'markdown'],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
declaration: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
linkSupport: true,
|
|
||||||
},
|
|
||||||
definition: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
linkSupport: true,
|
|
||||||
},
|
|
||||||
typeDefinition: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
linkSupport: true,
|
|
||||||
},
|
|
||||||
implementation: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
linkSupport: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
workspace: {
|
|
||||||
didChangeConfiguration: {
|
|
||||||
dynamicRegistration: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
export default class Client extends jsrpc.JSONRPCServerAndClient {
|
constructor(context: LspContext) {
|
||||||
afterInitializedHooks: (() => Promise<void>)[] = []
|
this.context = context
|
||||||
#fromServer: FromServer
|
|
||||||
private serverCapabilities: LSP.ServerCapabilities<any> = {}
|
|
||||||
private notifyFn: ((message: LSP.NotificationMessage) => void) | null = null
|
|
||||||
|
|
||||||
constructor(fromServer: FromServer, intoServer: IntoServer) {
|
|
||||||
super(
|
|
||||||
new jsrpc.JSONRPCServer(),
|
|
||||||
new jsrpc.JSONRPCClient(async (json: jsrpc.JSONRPCRequest) => {
|
|
||||||
const encoded = Codec.encode(json)
|
|
||||||
intoServer.enqueue(encoded)
|
|
||||||
if (null != json.id) {
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
||||||
const response = await fromServer.responses.get(json.id)
|
|
||||||
this.client.receive(response as jsrpc.JSONRPCResponse)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)
|
|
||||||
this.#fromServer = fromServer
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async start(): Promise<void> {
|
async startServer() {
|
||||||
// process "window/logMessage": client <- server
|
this.worker = new Worker({ name: this.context.worker })
|
||||||
this.addMethod(LSP.LogMessageNotification.type.method, (params) => {
|
this.worker.postMessage({
|
||||||
const { type, message } = params as {
|
worker: this.context.worker,
|
||||||
type: LSP.MessageType
|
eventType: LspWorkerEventType.Init,
|
||||||
message: string
|
eventData: this.context.options,
|
||||||
}
|
})
|
||||||
let messageString = ''
|
}
|
||||||
switch (type) {
|
|
||||||
case LSP.MessageType.Error: {
|
async startClient() {
|
||||||
messageString += '[error] '
|
const clientOptions: LanguageClientOptions = {
|
||||||
break
|
documentSelector: [{ language: 'kcl' }],
|
||||||
}
|
diagnosticCollectionName: 'markers',
|
||||||
case LSP.MessageType.Warning: {
|
}
|
||||||
messageString += ' [warn] '
|
|
||||||
break
|
if (!this.worker) {
|
||||||
}
|
console.error('Worker not initialized')
|
||||||
case LSP.MessageType.Info: {
|
|
||||||
messageString += ' [info] '
|
|
||||||
break
|
|
||||||
}
|
|
||||||
case LSP.MessageType.Log: {
|
|
||||||
messageString += ' [log] '
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
||||||
messageString += message
|
|
||||||
return
|
return
|
||||||
})
|
}
|
||||||
|
|
||||||
// process "client/registerCapability": client <- server
|
this.client = new LanguageClient(
|
||||||
this.addMethod(LSP.RegistrationRequest.type.method, (params) => {
|
this.context.worker + 'LspClient',
|
||||||
// Register a server capability.
|
this.context.worker + ' LSP Client',
|
||||||
params.registrations.forEach(
|
clientOptions,
|
||||||
(capabilityRegistration: LSP.Registration) => {
|
this.worker
|
||||||
const caps = registerServerCapability(
|
|
||||||
this.serverCapabilities,
|
|
||||||
capabilityRegistration
|
|
||||||
)
|
|
||||||
if (err(caps)) return (this.serverCapabilities = {})
|
|
||||||
this.serverCapabilities = caps
|
|
||||||
}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
// process "client/unregisterCapability": client <- server
|
|
||||||
this.addMethod(LSP.UnregistrationRequest.type.method, (params) => {
|
|
||||||
// Unregister a server capability.
|
|
||||||
params.unregisterations.forEach(
|
|
||||||
(capabilityUnregistration: LSP.Unregistration) => {
|
|
||||||
const caps = unregisterServerCapability(
|
|
||||||
this.serverCapabilities,
|
|
||||||
capabilityUnregistration
|
|
||||||
)
|
|
||||||
if (err(caps)) return (this.serverCapabilities = {})
|
|
||||||
this.serverCapabilities = caps
|
|
||||||
}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
// request "initialize": client <-> server
|
|
||||||
const { capabilities } = await this.request(
|
|
||||||
LSP.InitializeRequest.type.method,
|
|
||||||
{
|
|
||||||
processId: null,
|
|
||||||
clientInfo: {
|
|
||||||
name: 'kcl-language-client',
|
|
||||||
},
|
|
||||||
capabilities: client_capabilities,
|
|
||||||
rootUri: null,
|
|
||||||
} as LSP.InitializeParams
|
|
||||||
)
|
)
|
||||||
|
|
||||||
this.serverCapabilities = capabilities
|
try {
|
||||||
|
await this.client.start()
|
||||||
// notify "initialized": client --> server
|
} catch (error) {
|
||||||
this.notify(LSP.InitializedNotification.type.method, {})
|
this.client.error(`Start failed`, error, 'force')
|
||||||
|
|
||||||
await Promise.all(
|
|
||||||
this.afterInitializedHooks.map((f: () => Promise<void>) => f())
|
|
||||||
)
|
|
||||||
await Promise.all([this.processNotifications(), this.processRequests()])
|
|
||||||
}
|
|
||||||
|
|
||||||
getServerCapabilities(): LSP.ServerCapabilities<any> {
|
|
||||||
return this.serverCapabilities
|
|
||||||
}
|
|
||||||
|
|
||||||
setNotifyFn(fn: (message: LSP.NotificationMessage) => void): void {
|
|
||||||
this.notifyFn = fn
|
|
||||||
}
|
|
||||||
|
|
||||||
async processNotifications(): Promise<void> {
|
|
||||||
for await (const notification of this.#fromServer.notifications) {
|
|
||||||
if (this.notifyFn) {
|
|
||||||
this.notifyFn(notification)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async processRequests(): Promise<void> {
|
deactivate() {
|
||||||
for await (const request of this.#fromServer.requests) {
|
return this.client?.stop()
|
||||||
await this.receiveAndSend(request)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pushAfterInitializeHook(...hooks: (() => Promise<void>)[]): void {
|
|
||||||
this.afterInitializedHooks.push(...hooks)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,79 +0,0 @@
|
|||||||
import * as jsrpc from 'json-rpc-2.0'
|
|
||||||
import * as vsrpc from 'vscode-jsonrpc'
|
|
||||||
|
|
||||||
import Bytes from './codec/bytes'
|
|
||||||
import StreamDemuxer from './codec/demuxer'
|
|
||||||
import Headers from './codec/headers'
|
|
||||||
import Queue from './codec/queue'
|
|
||||||
import Tracer from './tracer'
|
|
||||||
import { LspWorkerEventType, LspWorker } from './types'
|
|
||||||
|
|
||||||
export const encoder = new TextEncoder()
|
|
||||||
export const decoder = new TextDecoder()
|
|
||||||
|
|
||||||
export class Codec {
|
|
||||||
static encode(
|
|
||||||
json: jsrpc.JSONRPCRequest | jsrpc.JSONRPCResponse
|
|
||||||
): Uint8Array {
|
|
||||||
const message = JSON.stringify(json)
|
|
||||||
const delimited = Headers.add(message)
|
|
||||||
return Bytes.encode(delimited)
|
|
||||||
}
|
|
||||||
|
|
||||||
static decode<T>(data: Uint8Array): T {
|
|
||||||
const delimited = Bytes.decode(data)
|
|
||||||
const message = Headers.remove(delimited)
|
|
||||||
return JSON.parse(message) as T
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// FIXME: tracing efficiency
|
|
||||||
export class IntoServer
|
|
||||||
extends Queue<Uint8Array>
|
|
||||||
implements AsyncGenerator<Uint8Array, never, void>
|
|
||||||
{
|
|
||||||
private worker: Worker | null = null
|
|
||||||
private type_: LspWorker | null = null
|
|
||||||
constructor(type_?: LspWorker, worker?: Worker) {
|
|
||||||
super()
|
|
||||||
if (worker && type_) {
|
|
||||||
this.worker = worker
|
|
||||||
this.type_ = type_
|
|
||||||
}
|
|
||||||
}
|
|
||||||
enqueue(item: Uint8Array): void {
|
|
||||||
Tracer.client(Headers.remove(decoder.decode(item)))
|
|
||||||
if (this.worker) {
|
|
||||||
this.worker.postMessage({
|
|
||||||
worker: this.type_,
|
|
||||||
eventType: LspWorkerEventType.Call,
|
|
||||||
eventData: item,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
super.enqueue(item)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface FromServer extends WritableStream<Uint8Array> {
|
|
||||||
readonly responses: {
|
|
||||||
get(key: number | string): null | Promise<vsrpc.ResponseMessage>
|
|
||||||
}
|
|
||||||
readonly notifications: AsyncGenerator<vsrpc.NotificationMessage, never, void>
|
|
||||||
readonly requests: AsyncGenerator<vsrpc.RequestMessage, never, void>
|
|
||||||
|
|
||||||
add(item: Uint8Array): void
|
|
||||||
}
|
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
|
||||||
export namespace FromServer {
|
|
||||||
export function create(): FromServer | Error {
|
|
||||||
// Calls private method .start() which can throw.
|
|
||||||
// This is an odd one of the bunch but try/catch seems most suitable here.
|
|
||||||
try {
|
|
||||||
return new StreamDemuxer()
|
|
||||||
} catch (e: any) {
|
|
||||||
return e
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,27 +0,0 @@
|
|||||||
import { encoder, decoder } from '../codec'
|
|
||||||
|
|
||||||
export default class Bytes {
|
|
||||||
static encode(input: string): Uint8Array {
|
|
||||||
return encoder.encode(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
static decode(input: Uint8Array): string {
|
|
||||||
return decoder.decode(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
static append<
|
|
||||||
T extends { length: number; set(arr: T, offset: number): void }
|
|
||||||
>(constructor: { new (length: number): T }, ...arrays: T[]) {
|
|
||||||
let totalLength = 0
|
|
||||||
for (const arr of arrays) {
|
|
||||||
totalLength += arr.length
|
|
||||||
}
|
|
||||||
const result = new constructor(totalLength)
|
|
||||||
let offset = 0
|
|
||||||
for (const arr of arrays) {
|
|
||||||
result.set(arr, offset)
|
|
||||||
offset += arr.length
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,101 +0,0 @@
|
|||||||
import * as vsrpc from 'vscode-jsonrpc'
|
|
||||||
|
|
||||||
import Bytes from './bytes'
|
|
||||||
import PromiseMap from './map'
|
|
||||||
import Queue from './queue'
|
|
||||||
import Tracer from '../tracer'
|
|
||||||
import { Codec } from '../codec'
|
|
||||||
|
|
||||||
export default class StreamDemuxer extends Queue<Uint8Array> {
|
|
||||||
readonly responses: PromiseMap<number | string, vsrpc.ResponseMessage> =
|
|
||||||
new PromiseMap()
|
|
||||||
readonly notifications: Queue<vsrpc.NotificationMessage> =
|
|
||||||
new Queue<vsrpc.NotificationMessage>()
|
|
||||||
readonly requests: Queue<vsrpc.RequestMessage> =
|
|
||||||
new Queue<vsrpc.RequestMessage>()
|
|
||||||
|
|
||||||
readonly #start: Promise<void>
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
super()
|
|
||||||
this.#start = this.start()
|
|
||||||
}
|
|
||||||
|
|
||||||
private async start(): Promise<void> {
|
|
||||||
let contentLength: null | number = null
|
|
||||||
let buffer = new Uint8Array()
|
|
||||||
|
|
||||||
for await (const bytes of this) {
|
|
||||||
buffer = Bytes.append(Uint8Array, buffer, bytes)
|
|
||||||
while (buffer.length > 0) {
|
|
||||||
// check if the content length is known
|
|
||||||
if (null == contentLength) {
|
|
||||||
// if not, try to match the prefixed headers
|
|
||||||
const match = Bytes.decode(buffer).match(
|
|
||||||
/^Content-Length:\s*(\d+)\s*/
|
|
||||||
)
|
|
||||||
if (null == match) continue
|
|
||||||
|
|
||||||
// try to parse the content-length from the headers
|
|
||||||
const length = parseInt(match[1])
|
|
||||||
|
|
||||||
if (isNaN(length))
|
|
||||||
return Promise.reject(new Error('invalid content length'))
|
|
||||||
|
|
||||||
// slice the headers since we now have the content length
|
|
||||||
buffer = buffer.slice(match[0].length)
|
|
||||||
|
|
||||||
// set the content length
|
|
||||||
contentLength = length
|
|
||||||
}
|
|
||||||
|
|
||||||
// if the buffer doesn't contain a full message; await another iteration
|
|
||||||
if (buffer.length < contentLength) continue
|
|
||||||
|
|
||||||
// Get just the slice of the buffer that is our content length.
|
|
||||||
const slice = buffer.slice(0, contentLength)
|
|
||||||
|
|
||||||
// decode buffer to a string
|
|
||||||
const delimited = Bytes.decode(slice)
|
|
||||||
|
|
||||||
// reset the buffer
|
|
||||||
buffer = buffer.slice(contentLength)
|
|
||||||
// reset the contentLength
|
|
||||||
contentLength = null
|
|
||||||
|
|
||||||
const message = JSON.parse(delimited) as vsrpc.Message
|
|
||||||
Tracer.server(message)
|
|
||||||
|
|
||||||
// demux the message stream
|
|
||||||
if (vsrpc.Message.isResponse(message) && null != message.id) {
|
|
||||||
this.responses.set(message.id, message)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (vsrpc.Message.isNotification(message)) {
|
|
||||||
this.notifications.enqueue(message)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (vsrpc.Message.isRequest(message)) {
|
|
||||||
this.requests.enqueue(message)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
add(bytes: Uint8Array): void {
|
|
||||||
const message = Codec.decode(bytes) as vsrpc.Message
|
|
||||||
Tracer.server(message)
|
|
||||||
|
|
||||||
// demux the message stream
|
|
||||||
if (vsrpc.Message.isResponse(message) && null != message.id) {
|
|
||||||
this.responses.set(message.id, message)
|
|
||||||
}
|
|
||||||
if (vsrpc.Message.isNotification(message)) {
|
|
||||||
this.notifications.enqueue(message)
|
|
||||||
}
|
|
||||||
if (vsrpc.Message.isRequest(message)) {
|
|
||||||
this.requests.enqueue(message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,9 +0,0 @@
|
|||||||
export default class Headers {
|
|
||||||
static add(message: string): string {
|
|
||||||
return `Content-Length: ${message.length}\r\n\r\n${message}`
|
|
||||||
}
|
|
||||||
|
|
||||||
static remove(delimited: string): string {
|
|
||||||
return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,72 +0,0 @@
|
|||||||
export default class PromiseMap<K, V extends { toString(): string }> {
|
|
||||||
#map: Map<K, PromiseMap.Entry<V>> = new Map()
|
|
||||||
|
|
||||||
get(key: K & { toString(): string }): null | Promise<V> {
|
|
||||||
let initialized: PromiseMap.Entry<V>
|
|
||||||
// if the entry doesn't exist, set it
|
|
||||||
if (!this.#map.has(key)) {
|
|
||||||
initialized = this.#set(key)
|
|
||||||
} else {
|
|
||||||
// otherwise return the entry
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
||||||
initialized = this.#map.get(key)!
|
|
||||||
}
|
|
||||||
// if the entry is a pending promise, return it
|
|
||||||
if (initialized.status === 'pending') {
|
|
||||||
return initialized.promise
|
|
||||||
} else {
|
|
||||||
// otherwise return null
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#set(key: K, value?: V): PromiseMap.Entry<V> {
|
|
||||||
if (this.#map.has(key)) {
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
||||||
return this.#map.get(key)!
|
|
||||||
}
|
|
||||||
// placeholder resolver for entry
|
|
||||||
let resolve = (item: V) => {
|
|
||||||
void item
|
|
||||||
}
|
|
||||||
// promise for entry (which assigns the resolver
|
|
||||||
const promise = new Promise<V>((resolver) => {
|
|
||||||
resolve = resolver
|
|
||||||
})
|
|
||||||
// the initialized entry
|
|
||||||
const initialized: PromiseMap.Entry<V> = {
|
|
||||||
status: 'pending',
|
|
||||||
resolve,
|
|
||||||
promise,
|
|
||||||
}
|
|
||||||
if (null != value) {
|
|
||||||
initialized.resolve(value)
|
|
||||||
}
|
|
||||||
// set the entry
|
|
||||||
this.#map.set(key, initialized)
|
|
||||||
return initialized
|
|
||||||
}
|
|
||||||
|
|
||||||
set(key: K & { toString(): string }, value: V): this {
|
|
||||||
const initialized = this.#set(key, value)
|
|
||||||
// if the promise is pending ...
|
|
||||||
if (initialized.status === 'pending') {
|
|
||||||
// ... set the entry status to resolved to free the promise
|
|
||||||
this.#map.set(key, { status: 'resolved' })
|
|
||||||
// ... and resolve the promise with the given value
|
|
||||||
initialized.resolve(value)
|
|
||||||
}
|
|
||||||
return this
|
|
||||||
}
|
|
||||||
|
|
||||||
get size(): number {
|
|
||||||
return this.#map.size
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
|
||||||
export namespace PromiseMap {
|
|
||||||
export type Entry<V> =
|
|
||||||
| { status: 'pending'; resolve: (item: V) => void; promise: Promise<V> }
|
|
||||||
| { status: 'resolved' }
|
|
||||||
}
|
|
@ -1,6 +1,5 @@
|
|||||||
import type * as LSP from 'vscode-languageserver-protocol'
|
import type * as LSP from 'vscode-languageserver-protocol'
|
||||||
import Client from './client'
|
import LspServerClient from './client'
|
||||||
import { SemanticToken, deserializeTokens } from './kcl/semantic_tokens'
|
|
||||||
import { LanguageServerPlugin } from 'editor/plugins/lsp/plugin'
|
import { LanguageServerPlugin } from 'editor/plugins/lsp/plugin'
|
||||||
import { CopilotLspCompletionParams } from 'wasm-lib/kcl/bindings/CopilotLspCompletionParams'
|
import { CopilotLspCompletionParams } from 'wasm-lib/kcl/bindings/CopilotLspCompletionParams'
|
||||||
import { CopilotCompletionResponse } from 'wasm-lib/kcl/bindings/CopilotCompletionResponse'
|
import { CopilotCompletionResponse } from 'wasm-lib/kcl/bindings/CopilotCompletionResponse'
|
||||||
@ -10,7 +9,7 @@ import { UpdateUnitsParams } from 'wasm-lib/kcl/bindings/UpdateUnitsParams'
|
|||||||
import { UpdateCanExecuteParams } from 'wasm-lib/kcl/bindings/UpdateCanExecuteParams'
|
import { UpdateCanExecuteParams } from 'wasm-lib/kcl/bindings/UpdateCanExecuteParams'
|
||||||
import { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'
|
import { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'
|
||||||
import { UpdateCanExecuteResponse } from 'wasm-lib/kcl/bindings/UpdateCanExecuteResponse'
|
import { UpdateCanExecuteResponse } from 'wasm-lib/kcl/bindings/UpdateCanExecuteResponse'
|
||||||
import { LspWorker } from './types'
|
import { LspWorker } from 'editor/plugins/lsp/types'
|
||||||
|
|
||||||
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/
|
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/
|
||||||
|
|
||||||
@ -54,7 +53,7 @@ interface LSPNotifyMap {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface LanguageServerClientOptions {
|
export interface LanguageServerClientOptions {
|
||||||
client: Client
|
client: LspServerClient
|
||||||
name: LspWorker
|
name: LspWorker
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -67,7 +66,7 @@ export interface LanguageServerOptions {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export class LanguageServerClient {
|
export class LanguageServerClient {
|
||||||
private client: Client
|
private client: LspServerClient
|
||||||
readonly name: string
|
readonly name: string
|
||||||
|
|
||||||
public ready: boolean
|
public ready: boolean
|
||||||
@ -77,7 +76,8 @@ export class LanguageServerClient {
|
|||||||
public initializePromise: Promise<void>
|
public initializePromise: Promise<void>
|
||||||
|
|
||||||
private isUpdatingSemanticTokens: boolean = false
|
private isUpdatingSemanticTokens: boolean = false
|
||||||
private semanticTokens: SemanticToken[] = []
|
// tODO: Fix this type
|
||||||
|
private semanticTokens: any = {}
|
||||||
private queuedUids: string[] = []
|
private queuedUids: string[] = []
|
||||||
|
|
||||||
constructor(options: LanguageServerClientOptions) {
|
constructor(options: LanguageServerClientOptions) {
|
||||||
@ -93,8 +93,7 @@ export class LanguageServerClient {
|
|||||||
|
|
||||||
async initialize() {
|
async initialize() {
|
||||||
// Start the client in the background.
|
// Start the client in the background.
|
||||||
this.client.setNotifyFn(this.processNotifications.bind(this))
|
this.client.startClient()
|
||||||
this.client.start()
|
|
||||||
|
|
||||||
this.ready = true
|
this.ready = true
|
||||||
}
|
}
|
||||||
@ -103,10 +102,6 @@ export class LanguageServerClient {
|
|||||||
return this.name
|
return this.name
|
||||||
}
|
}
|
||||||
|
|
||||||
getServerCapabilities(): LSP.ServerCapabilities<any> {
|
|
||||||
return this.client.getServerCapabilities()
|
|
||||||
}
|
|
||||||
|
|
||||||
close() {}
|
close() {}
|
||||||
|
|
||||||
textDocumentDidOpen(params: LSP.DidOpenTextDocumentParams) {
|
textDocumentDidOpen(params: LSP.DidOpenTextDocumentParams) {
|
||||||
@ -117,13 +112,10 @@ export class LanguageServerClient {
|
|||||||
plugin.documentUri = params.textDocument.uri
|
plugin.documentUri = params.textDocument.uri
|
||||||
plugin.languageId = params.textDocument.languageId
|
plugin.languageId = params.textDocument.languageId
|
||||||
}
|
}
|
||||||
|
|
||||||
this.updateSemanticTokens(params.textDocument.uri)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
textDocumentDidChange(params: LSP.DidChangeTextDocumentParams) {
|
textDocumentDidChange(params: LSP.DidChangeTextDocumentParams) {
|
||||||
this.notify('textDocument/didChange', params)
|
this.notify('textDocument/didChange', params)
|
||||||
this.updateSemanticTokens(params.textDocument.uri)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
textDocumentDidClose(params: LSP.DidCloseTextDocumentParams) {
|
textDocumentDidClose(params: LSP.DidCloseTextDocumentParams) {
|
||||||
@ -160,64 +152,19 @@ export class LanguageServerClient {
|
|||||||
this.notify('workspace/didDeleteFiles', params)
|
this.notify('workspace/didDeleteFiles', params)
|
||||||
}
|
}
|
||||||
|
|
||||||
async updateSemanticTokens(uri: string) {
|
|
||||||
const serverCapabilities = this.getServerCapabilities()
|
|
||||||
if (!serverCapabilities.semanticTokensProvider) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure we can only run, if we aren't already running.
|
|
||||||
if (!this.isUpdatingSemanticTokens) {
|
|
||||||
this.isUpdatingSemanticTokens = true
|
|
||||||
|
|
||||||
const result = await this.request('textDocument/semanticTokens/full', {
|
|
||||||
textDocument: {
|
|
||||||
uri,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
this.semanticTokens = await deserializeTokens(
|
|
||||||
result.data,
|
|
||||||
this.getServerCapabilities().semanticTokensProvider
|
|
||||||
)
|
|
||||||
|
|
||||||
this.isUpdatingSemanticTokens = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
getSemanticTokens(): SemanticToken[] {
|
|
||||||
return this.semanticTokens
|
|
||||||
}
|
|
||||||
|
|
||||||
async textDocumentHover(params: LSP.HoverParams) {
|
async textDocumentHover(params: LSP.HoverParams) {
|
||||||
const serverCapabilities = this.getServerCapabilities()
|
|
||||||
if (!serverCapabilities.hoverProvider) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
return await this.request('textDocument/hover', params)
|
return await this.request('textDocument/hover', params)
|
||||||
}
|
}
|
||||||
|
|
||||||
async textDocumentFormatting(params: LSP.DocumentFormattingParams) {
|
async textDocumentFormatting(params: LSP.DocumentFormattingParams) {
|
||||||
const serverCapabilities = this.getServerCapabilities()
|
|
||||||
if (!serverCapabilities.documentFormattingProvider) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
return await this.request('textDocument/formatting', params)
|
return await this.request('textDocument/formatting', params)
|
||||||
}
|
}
|
||||||
|
|
||||||
async textDocumentFoldingRange(params: LSP.FoldingRangeParams) {
|
async textDocumentFoldingRange(params: LSP.FoldingRangeParams) {
|
||||||
const serverCapabilities = this.getServerCapabilities()
|
|
||||||
if (!serverCapabilities.foldingRangeProvider) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
return await this.request('textDocument/foldingRange', params)
|
return await this.request('textDocument/foldingRange', params)
|
||||||
}
|
}
|
||||||
|
|
||||||
async textDocumentCompletion(params: LSP.CompletionParams) {
|
async textDocumentCompletion(params: LSP.CompletionParams) {
|
||||||
const serverCapabilities = this.getServerCapabilities()
|
|
||||||
if (!serverCapabilities.completionProvider) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const response = await this.request('textDocument/completion', params)
|
const response = await this.request('textDocument/completion', params)
|
||||||
return response
|
return response
|
||||||
}
|
}
|
||||||
@ -236,14 +183,19 @@ export class LanguageServerClient {
|
|||||||
method: K,
|
method: K,
|
||||||
params: LSPRequestMap[K][0]
|
params: LSPRequestMap[K][0]
|
||||||
): Promise<LSPRequestMap[K][1]> {
|
): Promise<LSPRequestMap[K][1]> {
|
||||||
return this.client.request(method, params) as Promise<LSPRequestMap[K][1]>
|
return this.client.client?.sendRequest(method, params) as Promise<
|
||||||
|
LSPRequestMap[K][1]
|
||||||
|
>
|
||||||
}
|
}
|
||||||
|
|
||||||
private notify<K extends keyof LSPNotifyMap>(
|
private notify<K extends keyof LSPNotifyMap>(
|
||||||
method: K,
|
method: K,
|
||||||
params: LSPNotifyMap[K]
|
params: LSPNotifyMap[K]
|
||||||
): void {
|
): Promise<void> {
|
||||||
return this.client.notify(method, params)
|
if (!this.client.client) {
|
||||||
|
return Promise.resolve()
|
||||||
|
}
|
||||||
|
return this.client.client.sendNotification(method, params)
|
||||||
}
|
}
|
||||||
|
|
||||||
async getCompletion(params: CopilotLspCompletionParams) {
|
async getCompletion(params: CopilotLspCompletionParams) {
|
||||||
@ -253,6 +205,33 @@ export class LanguageServerClient {
|
|||||||
return response
|
return response
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getServerCapabilities(): LSP.ServerCapabilities<any> | null {
|
||||||
|
if (!this.client.client) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Fix this type
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateSemanticTokens(uri: string) {
|
||||||
|
// Make sure we can only run, if we aren't already running.
|
||||||
|
if (!this.isUpdatingSemanticTokens) {
|
||||||
|
this.isUpdatingSemanticTokens = true
|
||||||
|
|
||||||
|
this.semanticTokens = await this.request(
|
||||||
|
'textDocument/semanticTokens/full',
|
||||||
|
{
|
||||||
|
textDocument: {
|
||||||
|
uri,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
this.isUpdatingSemanticTokens = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async accept(uuid: string) {
|
async accept(uuid: string) {
|
||||||
const badUids = this.queuedUids.filter((u) => u !== uuid)
|
const badUids = this.queuedUids.filter((u) => u !== uuid)
|
||||||
this.queuedUids = []
|
this.queuedUids = []
|
||||||
@ -286,6 +265,7 @@ export class LanguageServerClient {
|
|||||||
return await this.request('kcl/updateCanExecute', params)
|
return await this.request('kcl/updateCanExecute', params)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO: Fix this type
|
||||||
private processNotifications(notification: LSP.NotificationMessage) {
|
private processNotifications(notification: LSP.NotificationMessage) {
|
||||||
for (const plugin of this.plugins) plugin.processNotification(notification)
|
for (const plugin of this.plugins) plugin.processNotification(notification)
|
||||||
}
|
}
|
||||||
|
@ -122,13 +122,13 @@ export function kclPlugin(options: LanguageServerOptions): Extension {
|
|||||||
const line = state.doc.lineAt(pos)
|
const line = state.doc.lineAt(pos)
|
||||||
let trigKind: CompletionTriggerKind = CompletionTriggerKind.Invoked
|
let trigKind: CompletionTriggerKind = CompletionTriggerKind.Invoked
|
||||||
let trigChar: string | undefined
|
let trigChar: string | undefined
|
||||||
|
const serverCapabilities = plugin.client.getServerCapabilities()
|
||||||
if (
|
if (
|
||||||
|
serverCapabilities &&
|
||||||
!explicit &&
|
!explicit &&
|
||||||
plugin.client
|
serverCapabilities.completionProvider?.triggerCharacters?.includes(
|
||||||
.getServerCapabilities()
|
line.text[pos - line.from - 1]
|
||||||
.completionProvider?.triggerCharacters?.includes(
|
)
|
||||||
line.text[pos - line.from - 1]
|
|
||||||
)
|
|
||||||
) {
|
) {
|
||||||
trigKind = CompletionTriggerKind.TriggerCharacter
|
trigKind = CompletionTriggerKind.TriggerCharacter
|
||||||
trigChar = line.text[pos - line.from - 1]
|
trigChar = line.text[pos - line.from - 1]
|
||||||
|
@ -1,168 +0,0 @@
|
|||||||
// Extends the codemirror Parser for kcl.
|
|
||||||
|
|
||||||
import {
|
|
||||||
Parser,
|
|
||||||
Input,
|
|
||||||
TreeFragment,
|
|
||||||
PartialParse,
|
|
||||||
Tree,
|
|
||||||
NodeType,
|
|
||||||
NodeSet,
|
|
||||||
} from '@lezer/common'
|
|
||||||
import { LanguageServerClient } from 'editor/plugins/lsp'
|
|
||||||
import { posToOffset } from 'editor/plugins/lsp/util'
|
|
||||||
import { SemanticToken } from './semantic_tokens'
|
|
||||||
import { DocInput } from '@codemirror/language'
|
|
||||||
import { tags, styleTags } from '@lezer/highlight'
|
|
||||||
|
|
||||||
export default class KclParser extends Parser {
|
|
||||||
private client: LanguageServerClient
|
|
||||||
|
|
||||||
constructor(client: LanguageServerClient) {
|
|
||||||
super()
|
|
||||||
this.client = client
|
|
||||||
}
|
|
||||||
|
|
||||||
createParse(
|
|
||||||
input: Input,
|
|
||||||
fragments: readonly TreeFragment[],
|
|
||||||
ranges: readonly { from: number; to: number }[]
|
|
||||||
): PartialParse {
|
|
||||||
let parse: PartialParse = new Context(this, input, fragments, ranges)
|
|
||||||
return parse
|
|
||||||
}
|
|
||||||
|
|
||||||
getTokenTypes(): string[] {
|
|
||||||
return this.client.getServerCapabilities().semanticTokensProvider!.legend
|
|
||||||
.tokenTypes
|
|
||||||
}
|
|
||||||
|
|
||||||
getSemanticTokens(): SemanticToken[] {
|
|
||||||
return this.client.getSemanticTokens()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
class Context implements PartialParse {
|
|
||||||
private parser: KclParser
|
|
||||||
private input: DocInput
|
|
||||||
private fragments: readonly TreeFragment[]
|
|
||||||
private ranges: readonly { from: number; to: number }[]
|
|
||||||
|
|
||||||
private nodeTypes: { [key: string]: NodeType }
|
|
||||||
stoppedAt: number = 0
|
|
||||||
|
|
||||||
private semanticTokens: SemanticToken[] = []
|
|
||||||
private currentLine: number = 0
|
|
||||||
private currentColumn: number = 0
|
|
||||||
private nodeSet: NodeSet
|
|
||||||
|
|
||||||
constructor(
|
|
||||||
/// The parser configuration used.
|
|
||||||
parser: KclParser,
|
|
||||||
input: Input,
|
|
||||||
fragments: readonly TreeFragment[],
|
|
||||||
ranges: readonly { from: number; to: number }[]
|
|
||||||
) {
|
|
||||||
this.parser = parser
|
|
||||||
this.input = input as DocInput
|
|
||||||
this.fragments = fragments
|
|
||||||
this.ranges = ranges
|
|
||||||
|
|
||||||
// Iterate over the semantic token types and create a node type for each.
|
|
||||||
this.nodeTypes = {}
|
|
||||||
let nodeArray: NodeType[] = []
|
|
||||||
this.parser.getTokenTypes().forEach((tokenType, index) => {
|
|
||||||
const nodeType = NodeType.define({
|
|
||||||
id: index,
|
|
||||||
name: tokenType,
|
|
||||||
// props: [this.styleTags],
|
|
||||||
})
|
|
||||||
this.nodeTypes[tokenType] = nodeType
|
|
||||||
nodeArray.push(nodeType)
|
|
||||||
})
|
|
||||||
|
|
||||||
this.semanticTokens = this.parser.getSemanticTokens()
|
|
||||||
const styles = styleTags({
|
|
||||||
number: tags.number,
|
|
||||||
variable: tags.variableName,
|
|
||||||
operator: tags.operator,
|
|
||||||
keyword: tags.keyword,
|
|
||||||
string: tags.string,
|
|
||||||
comment: tags.comment,
|
|
||||||
function: tags.function(tags.variableName),
|
|
||||||
})
|
|
||||||
this.nodeSet = new NodeSet(nodeArray).extend(styles)
|
|
||||||
}
|
|
||||||
|
|
||||||
get parsedPos(): number {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
advance(): Tree | null {
|
|
||||||
if (this.semanticTokens.length === 0) {
|
|
||||||
return new Tree(NodeType.none, [], [], 0)
|
|
||||||
}
|
|
||||||
const tree = this.createTree(this.semanticTokens[0], 0)
|
|
||||||
this.stoppedAt = this.input.doc.length
|
|
||||||
return tree
|
|
||||||
}
|
|
||||||
|
|
||||||
createTree(token: SemanticToken, index: number): Tree {
|
|
||||||
const changedLine = token.delta_line !== 0
|
|
||||||
this.currentLine += token.delta_line
|
|
||||||
if (changedLine) {
|
|
||||||
this.currentColumn = 0
|
|
||||||
}
|
|
||||||
this.currentColumn += token.delta_start
|
|
||||||
|
|
||||||
// Let's get our position relative to the start of the file.
|
|
||||||
let currentPosition = posToOffset(this.input.doc, {
|
|
||||||
line: this.currentLine,
|
|
||||||
character: this.currentColumn,
|
|
||||||
})
|
|
||||||
|
|
||||||
const nodeType = this.nodeSet.types[this.nodeTypes[token.token_type].id]
|
|
||||||
|
|
||||||
if (currentPosition === undefined) {
|
|
||||||
// This is bad and weird.
|
|
||||||
return new Tree(nodeType, [], [], token.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (index >= this.semanticTokens.length - 1) {
|
|
||||||
// We have no children.
|
|
||||||
return new Tree(nodeType, [], [], token.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
const nextIndex = index + 1
|
|
||||||
const nextToken = this.semanticTokens[nextIndex]
|
|
||||||
const changedLineNext = nextToken.delta_line !== 0
|
|
||||||
const nextLine = this.currentLine + nextToken.delta_line
|
|
||||||
const nextColumn = changedLineNext
|
|
||||||
? nextToken.delta_start
|
|
||||||
: this.currentColumn + nextToken.delta_start
|
|
||||||
const nextPosition = posToOffset(this.input.doc, {
|
|
||||||
line: nextLine,
|
|
||||||
character: nextColumn,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (nextPosition === undefined) {
|
|
||||||
// This is bad and weird.
|
|
||||||
return new Tree(nodeType, [], [], token.length)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Let's get the
|
|
||||||
|
|
||||||
return new Tree(
|
|
||||||
nodeType,
|
|
||||||
[this.createTree(nextToken, nextIndex)],
|
|
||||||
|
|
||||||
// The positions (offsets relative to the start of this tree) of the children.
|
|
||||||
[nextPosition - currentPosition],
|
|
||||||
token.length
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
stopAt(pos: number) {
|
|
||||||
this.stoppedAt = pos
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,51 +0,0 @@
|
|||||||
import type * as LSP from 'vscode-languageserver-protocol'
|
|
||||||
|
|
||||||
export class SemanticToken {
|
|
||||||
delta_line: number
|
|
||||||
delta_start: number
|
|
||||||
length: number
|
|
||||||
token_type: string
|
|
||||||
token_modifiers_bitset: string
|
|
||||||
|
|
||||||
constructor(
|
|
||||||
delta_line = 0,
|
|
||||||
delta_start = 0,
|
|
||||||
length = 0,
|
|
||||||
token_type = '',
|
|
||||||
token_modifiers_bitset = ''
|
|
||||||
) {
|
|
||||||
this.delta_line = delta_line
|
|
||||||
this.delta_start = delta_start
|
|
||||||
this.length = length
|
|
||||||
this.token_type = token_type
|
|
||||||
this.token_modifiers_bitset = token_modifiers_bitset
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function deserializeTokens(
|
|
||||||
data: number[],
|
|
||||||
semanticTokensProvider?: LSP.SemanticTokensOptions
|
|
||||||
): Promise<SemanticToken[]> {
|
|
||||||
if (!semanticTokensProvider) {
|
|
||||||
return []
|
|
||||||
}
|
|
||||||
// Check if data length is divisible by 5
|
|
||||||
if (data.length % 5 !== 0) {
|
|
||||||
return Promise.reject(new Error('Length is not divisible by 5'))
|
|
||||||
}
|
|
||||||
|
|
||||||
const tokens = []
|
|
||||||
for (let i = 0; i < data.length; i += 5) {
|
|
||||||
tokens.push(
|
|
||||||
new SemanticToken(
|
|
||||||
data[i],
|
|
||||||
data[i + 1],
|
|
||||||
data[i + 2],
|
|
||||||
semanticTokensProvider.legend.tokenTypes[data[i + 3]],
|
|
||||||
semanticTokensProvider.legend.tokenModifiers[data[i + 4]]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return tokens
|
|
||||||
}
|
|
@ -145,11 +145,7 @@ export class LanguageServerPlugin implements PluginValue {
|
|||||||
view: EditorView,
|
view: EditorView,
|
||||||
{ line, character }: { line: number; character: number }
|
{ line, character }: { line: number; character: number }
|
||||||
): Promise<Tooltip | null> {
|
): Promise<Tooltip | null> {
|
||||||
if (
|
if (!this.client.ready) return null
|
||||||
!this.client.ready ||
|
|
||||||
!this.client.getServerCapabilities().hoverProvider
|
|
||||||
)
|
|
||||||
return null
|
|
||||||
|
|
||||||
this.sendChange({ documentText: view.state.doc.toString() })
|
this.sendChange({ documentText: view.state.doc.toString() })
|
||||||
const result = await this.client.textDocumentHover({
|
const result = await this.client.textDocumentHover({
|
||||||
@ -175,11 +171,7 @@ export class LanguageServerPlugin implements PluginValue {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async getFoldingRanges(): Promise<LSP.FoldingRange[] | null> {
|
async getFoldingRanges(): Promise<LSP.FoldingRange[] | null> {
|
||||||
if (
|
if (!this.client.ready) return null
|
||||||
!this.client.ready ||
|
|
||||||
!this.client.getServerCapabilities().foldingRangeProvider
|
|
||||||
)
|
|
||||||
return null
|
|
||||||
const result = await this.client.textDocumentFoldingRange({
|
const result = await this.client.textDocumentFoldingRange({
|
||||||
textDocument: { uri: this.documentUri },
|
textDocument: { uri: this.documentUri },
|
||||||
})
|
})
|
||||||
@ -259,11 +251,7 @@ export class LanguageServerPlugin implements PluginValue {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async requestFormatting() {
|
async requestFormatting() {
|
||||||
if (
|
if (!this.client.ready) return null
|
||||||
!this.client.ready ||
|
|
||||||
!this.client.getServerCapabilities().documentFormattingProvider
|
|
||||||
)
|
|
||||||
return null
|
|
||||||
|
|
||||||
this.client.textDocumentDidChange({
|
this.client.textDocumentDidChange({
|
||||||
textDocument: {
|
textDocument: {
|
||||||
@ -309,11 +297,7 @@ export class LanguageServerPlugin implements PluginValue {
|
|||||||
triggerCharacter: string | undefined
|
triggerCharacter: string | undefined
|
||||||
}
|
}
|
||||||
): Promise<CompletionResult | null> {
|
): Promise<CompletionResult | null> {
|
||||||
if (
|
if (!this.client.ready) return null
|
||||||
!this.client.ready ||
|
|
||||||
!this.client.getServerCapabilities().completionProvider
|
|
||||||
)
|
|
||||||
return null
|
|
||||||
|
|
||||||
this.sendChange({
|
this.sendChange({
|
||||||
documentText: context.state.doc.toString(),
|
documentText: context.state.doc.toString(),
|
||||||
|
@ -33,15 +33,19 @@ export default class Queue<T>
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
constructor() {
|
constructor(stream?: WritableStream<T>) {
|
||||||
const closed = this.#closed
|
const closed = this.#closed
|
||||||
const promises = this.#promises
|
const promises = this.#promises
|
||||||
const resolvers = this.#resolvers
|
const resolvers = this.#resolvers
|
||||||
this.#stream = new WritableStream({
|
if (stream) {
|
||||||
write(item: T): void {
|
this.#stream = stream
|
||||||
Queue.#__enqueue(closed, promises, resolvers, item)
|
} else {
|
||||||
},
|
this.#stream = new WritableStream({
|
||||||
})
|
write(item: T): void {
|
||||||
|
Queue.#__enqueue(closed, promises, resolvers, item)
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#add(): void {
|
#add(): void {
|
@ -1,77 +0,0 @@
|
|||||||
import {
|
|
||||||
Registration,
|
|
||||||
ServerCapabilities,
|
|
||||||
Unregistration,
|
|
||||||
} from 'vscode-languageserver-protocol'
|
|
||||||
|
|
||||||
interface IFlexibleServerCapabilities extends ServerCapabilities {
|
|
||||||
[key: string]: any
|
|
||||||
}
|
|
||||||
|
|
||||||
interface IMethodServerCapabilityProviderDictionary {
|
|
||||||
[key: string]: string
|
|
||||||
}
|
|
||||||
|
|
||||||
const ServerCapabilitiesProviders: IMethodServerCapabilityProviderDictionary = {
|
|
||||||
'textDocument/hover': 'hoverProvider',
|
|
||||||
'textDocument/completion': 'completionProvider',
|
|
||||||
'textDocument/signatureHelp': 'signatureHelpProvider',
|
|
||||||
'textDocument/definition': 'definitionProvider',
|
|
||||||
'textDocument/typeDefinition': 'typeDefinitionProvider',
|
|
||||||
'textDocument/implementation': 'implementationProvider',
|
|
||||||
'textDocument/references': 'referencesProvider',
|
|
||||||
'textDocument/documentHighlight': 'documentHighlightProvider',
|
|
||||||
'textDocument/documentSymbol': 'documentSymbolProvider',
|
|
||||||
'textDocument/workspaceSymbol': 'workspaceSymbolProvider',
|
|
||||||
'textDocument/codeAction': 'codeActionProvider',
|
|
||||||
'textDocument/codeLens': 'codeLensProvider',
|
|
||||||
'textDocument/documentFormatting': 'documentFormattingProvider',
|
|
||||||
'textDocument/documentRangeFormatting': 'documentRangeFormattingProvider',
|
|
||||||
'textDocument/documentOnTypeFormatting': 'documentOnTypeFormattingProvider',
|
|
||||||
'textDocument/rename': 'renameProvider',
|
|
||||||
'textDocument/documentLink': 'documentLinkProvider',
|
|
||||||
'textDocument/color': 'colorProvider',
|
|
||||||
'textDocument/foldingRange': 'foldingRangeProvider',
|
|
||||||
'textDocument/declaration': 'declarationProvider',
|
|
||||||
'textDocument/executeCommand': 'executeCommandProvider',
|
|
||||||
'textDocument/semanticTokens/full': 'semanticTokensProvider',
|
|
||||||
'textDocument/publishDiagnostics': 'diagnosticsProvider',
|
|
||||||
}
|
|
||||||
|
|
||||||
function registerServerCapability(
|
|
||||||
serverCapabilities: ServerCapabilities,
|
|
||||||
registration: Registration
|
|
||||||
): ServerCapabilities | Error {
|
|
||||||
const serverCapabilitiesCopy =
|
|
||||||
serverCapabilities as IFlexibleServerCapabilities
|
|
||||||
const { method, registerOptions } = registration
|
|
||||||
const providerName = ServerCapabilitiesProviders[method]
|
|
||||||
|
|
||||||
if (providerName) {
|
|
||||||
if (!registerOptions) {
|
|
||||||
serverCapabilitiesCopy[providerName] = true
|
|
||||||
} else {
|
|
||||||
serverCapabilitiesCopy[providerName] = Object.assign({}, registerOptions)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return new Error('Could not register server capability.')
|
|
||||||
}
|
|
||||||
|
|
||||||
return serverCapabilitiesCopy
|
|
||||||
}
|
|
||||||
|
|
||||||
function unregisterServerCapability(
|
|
||||||
serverCapabilities: ServerCapabilities,
|
|
||||||
unregistration: Unregistration
|
|
||||||
): ServerCapabilities {
|
|
||||||
const serverCapabilitiesCopy =
|
|
||||||
serverCapabilities as IFlexibleServerCapabilities
|
|
||||||
const { method } = unregistration
|
|
||||||
const providerName = ServerCapabilitiesProviders[method]
|
|
||||||
|
|
||||||
delete serverCapabilitiesCopy[providerName]
|
|
||||||
|
|
||||||
return serverCapabilitiesCopy
|
|
||||||
}
|
|
||||||
|
|
||||||
export { registerServerCapability, unregisterServerCapability }
|
|
@ -1,21 +0,0 @@
|
|||||||
import { Message } from 'vscode-languageserver-protocol'
|
|
||||||
|
|
||||||
const env = import.meta.env.MODE
|
|
||||||
|
|
||||||
export default class Tracer {
|
|
||||||
static client(message: string): void {
|
|
||||||
// These are really noisy, so we have a special env var for them.
|
|
||||||
if (env === 'lsp_tracing') {
|
|
||||||
console.log('lsp client message', message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static server(input: string | Message): void {
|
|
||||||
// These are really noisy, so we have a special env var for them.
|
|
||||||
if (env === 'lsp_tracing') {
|
|
||||||
const message: string =
|
|
||||||
typeof input === 'string' ? input : JSON.stringify(input)
|
|
||||||
console.log('lsp server message', message)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -4,22 +4,27 @@ export enum LspWorker {
|
|||||||
Kcl = 'kcl',
|
Kcl = 'kcl',
|
||||||
Copilot = 'copilot',
|
Copilot = 'copilot',
|
||||||
}
|
}
|
||||||
export interface KclWorkerOptions {
|
|
||||||
wasmUrl: string
|
interface LspWorkerOptions {
|
||||||
token: string
|
token: string
|
||||||
baseUnit: UnitLength
|
|
||||||
apiBaseUrl: string
|
apiBaseUrl: string
|
||||||
|
callback: () => void
|
||||||
|
wasmUrl: string
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CopilotWorkerOptions {
|
export interface KclWorkerOptions extends LspWorkerOptions {
|
||||||
wasmUrl: string
|
baseUnit: UnitLength
|
||||||
token: string
|
}
|
||||||
apiBaseUrl: string
|
|
||||||
|
export interface CopilotWorkerOptions extends LspWorkerOptions {}
|
||||||
|
|
||||||
|
export interface LspContext {
|
||||||
|
worker: LspWorker
|
||||||
|
options: KclWorkerOptions | CopilotWorkerOptions
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum LspWorkerEventType {
|
export enum LspWorkerEventType {
|
||||||
Init = 'init',
|
Init = 'init',
|
||||||
Call = 'call',
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface LspWorkerEvent {
|
export interface LspWorkerEvent {
|
||||||
|
@ -1,23 +1,77 @@
|
|||||||
import { Codec, FromServer, IntoServer } from 'editor/plugins/lsp/codec'
|
|
||||||
import { fileSystemManager } from 'lang/std/fileSystemManager'
|
import { fileSystemManager } from 'lang/std/fileSystemManager'
|
||||||
import init, {
|
import init, {
|
||||||
ServerConfig,
|
ServerConfig,
|
||||||
copilot_lsp_run,
|
copilot_lsp_run,
|
||||||
kcl_lsp_run,
|
kcl_lsp_run,
|
||||||
} from 'wasm-lib/pkg/wasm_lib'
|
} from 'wasm-lib/pkg/wasm_lib'
|
||||||
import * as jsrpc from 'json-rpc-2.0'
|
|
||||||
import {
|
import {
|
||||||
LspWorkerEventType,
|
|
||||||
LspWorkerEvent,
|
|
||||||
LspWorker,
|
LspWorker,
|
||||||
KclWorkerOptions,
|
KclWorkerOptions,
|
||||||
CopilotWorkerOptions,
|
CopilotWorkerOptions,
|
||||||
} from 'editor/plugins/lsp/types'
|
} from 'editor/plugins/lsp/types'
|
||||||
import { EngineCommandManager } from 'lang/std/engineConnection'
|
import { EngineCommandManager } from 'lang/std/engineConnection'
|
||||||
import { err } from 'lib/trap'
|
import { err } from 'lib/trap'
|
||||||
|
import { Message } from 'vscode-languageserver'
|
||||||
|
import { LspWorkerEvent, LspWorkerEventType } from 'editor/plugins/lsp/types'
|
||||||
|
import Queue from 'editor/plugins/lsp/queue'
|
||||||
|
import {
|
||||||
|
BrowserMessageReader,
|
||||||
|
BrowserMessageWriter,
|
||||||
|
} from 'vscode-languageserver-protocol/browser'
|
||||||
|
|
||||||
const intoServer: IntoServer = new IntoServer()
|
class Headers {
|
||||||
const fromServer: FromServer | Error = FromServer.create()
|
static add(message: string): string {
|
||||||
|
return `Content-Length: ${message.length}\r\n\r\n${message}`
|
||||||
|
}
|
||||||
|
|
||||||
|
static remove(delimited: string): string {
|
||||||
|
return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const encoder = new TextEncoder()
|
||||||
|
export const decoder = new TextDecoder()
|
||||||
|
|
||||||
|
class Codec {
|
||||||
|
static encode(message: Message): Uint8Array {
|
||||||
|
const rpc = JSON.stringify(message.jsonrpc)
|
||||||
|
const delimited = Headers.add(rpc)
|
||||||
|
return encoder.encode(delimited)
|
||||||
|
}
|
||||||
|
|
||||||
|
static decode<T>(data: Uint8Array): T {
|
||||||
|
const delimited = decoder.decode(data)
|
||||||
|
const message = Headers.remove(delimited)
|
||||||
|
return JSON.parse(message) as T
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class IntoServer extends Queue<Uint8Array> {
|
||||||
|
constructor(reader: BrowserMessageReader) {
|
||||||
|
super()
|
||||||
|
reader.listen((message: Message) => {
|
||||||
|
super.enqueue(Codec.encode(message))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class FromServer extends Queue<Uint8Array> {
|
||||||
|
constructor(writer: BrowserMessageWriter) {
|
||||||
|
super(
|
||||||
|
new WritableStream({
|
||||||
|
write(item: Uint8Array): void {
|
||||||
|
writer.write(Codec.decode(item))
|
||||||
|
},
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const browserReader = new BrowserMessageReader(self)
|
||||||
|
const browserWriter = new BrowserMessageWriter(self)
|
||||||
|
|
||||||
|
const intoServer = new IntoServer(browserReader)
|
||||||
|
const fromServer = new FromServer(browserWriter)
|
||||||
|
|
||||||
// Initialise the wasm module.
|
// Initialise the wasm module.
|
||||||
const initialise = async (wasmUrl: string) => {
|
const initialise = async (wasmUrl: string) => {
|
||||||
@ -57,7 +111,7 @@ export async function kclLspRun(
|
|||||||
}
|
}
|
||||||
|
|
||||||
onmessage = function (event) {
|
onmessage = function (event) {
|
||||||
if (err(fromServer)) return
|
if (err(intoServer)) return
|
||||||
const { worker, eventType, eventData }: LspWorkerEvent = event.data
|
const { worker, eventType, eventData }: LspWorkerEvent = event.data
|
||||||
|
|
||||||
switch (eventType) {
|
switch (eventType) {
|
||||||
@ -95,35 +149,7 @@ onmessage = function (event) {
|
|||||||
console.error('Worker: Error loading wasm module', worker, error)
|
console.error('Worker: Error loading wasm module', worker, error)
|
||||||
})
|
})
|
||||||
break
|
break
|
||||||
case LspWorkerEventType.Call:
|
|
||||||
const data = eventData as Uint8Array
|
|
||||||
intoServer.enqueue(data)
|
|
||||||
const json: jsrpc.JSONRPCRequest = Codec.decode(data)
|
|
||||||
if (null != json.id) {
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
|
||||||
fromServer.responses.get(json.id)!.then((response) => {
|
|
||||||
const encoded = Codec.encode(response as jsrpc.JSONRPCResponse)
|
|
||||||
postMessage(encoded)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
break
|
|
||||||
default:
|
default:
|
||||||
console.error('Worker: Unknown message type', worker, eventType)
|
console.error('Worker: Unknown message type', worker, eventType)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
new Promise<void>(async (resolve) => {
|
|
||||||
if (err(fromServer)) return
|
|
||||||
for await (const requests of fromServer.requests) {
|
|
||||||
const encoded = Codec.encode(requests as jsrpc.JSONRPCRequest)
|
|
||||||
postMessage(encoded)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
new Promise<void>(async (resolve) => {
|
|
||||||
if (err(fromServer)) return
|
|
||||||
for await (const notification of fromServer.notifications) {
|
|
||||||
const encoded = Codec.encode(notification as jsrpc.JSONRPCRequest)
|
|
||||||
postMessage(encoded)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
@ -15,7 +15,6 @@ import {
|
|||||||
sketchOnExtrudedFace,
|
sketchOnExtrudedFace,
|
||||||
deleteSegmentFromPipeExpression,
|
deleteSegmentFromPipeExpression,
|
||||||
removeSingleConstraintInfo,
|
removeSingleConstraintInfo,
|
||||||
deleteFromSelection,
|
|
||||||
} from './modifyAst'
|
} from './modifyAst'
|
||||||
import { enginelessExecutor } from '../lib/testHelpers'
|
import { enginelessExecutor } from '../lib/testHelpers'
|
||||||
import { findUsesOfTagInPipe, getNodePathFromSourceRange } from './queryAst'
|
import { findUsesOfTagInPipe, getNodePathFromSourceRange } from './queryAst'
|
||||||
@ -697,196 +696,3 @@ describe('Testing removeSingleConstraintInfo', () => {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
describe('Testing deleteFromSelection', () => {
|
|
||||||
const cases = [
|
|
||||||
[
|
|
||||||
'basicCase',
|
|
||||||
{
|
|
||||||
codeBefore: `const myVar = 5
|
|
||||||
const sketch003 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([3.82, 13.6], %)
|
|
||||||
|> line([-2.94, 2.7], %)
|
|
||||||
|> line([7.7, 0.16], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)`,
|
|
||||||
codeAfter: `const myVar = 5\n`,
|
|
||||||
lineOfInterest: 'line([-2.94, 2.7], %)',
|
|
||||||
type: 'default',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
[
|
|
||||||
'delete extrude',
|
|
||||||
{
|
|
||||||
codeBefore: `const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([3.29, 7.86], %)
|
|
||||||
|> line([2.48, 2.44], %)
|
|
||||||
|> line([2.66, 1.17], %)
|
|
||||||
|> line([3.75, 0.46], %)
|
|
||||||
|> line([4.99, -0.46], %, $seg01)
|
|
||||||
|> line([-3.86, -2.73], %)
|
|
||||||
|> line([-17.67, 0.85], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude001 = extrude(10, sketch001)`,
|
|
||||||
codeAfter: `const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([3.29, 7.86], %)
|
|
||||||
|> line([2.48, 2.44], %)
|
|
||||||
|> line([2.66, 1.17], %)
|
|
||||||
|> line([3.75, 0.46], %)
|
|
||||||
|> line([4.99, -0.46], %, $seg01)
|
|
||||||
|> line([-3.86, -2.73], %)
|
|
||||||
|> line([-17.67, 0.85], %)
|
|
||||||
|> close(%)\n`,
|
|
||||||
lineOfInterest: 'line([2.66, 1.17], %)',
|
|
||||||
type: 'extrude-wall',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
[
|
|
||||||
'delete extrude with sketch on it',
|
|
||||||
{
|
|
||||||
codeBefore: `const myVar = 5
|
|
||||||
const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
|
||||||
|> line([0.08, myVar], %)
|
|
||||||
|> line([13.03, 2.02], %, $seg01)
|
|
||||||
|> line([3.9, -7.6], %)
|
|
||||||
|> line([-11.18, -2.15], %)
|
|
||||||
|> line([5.41, -9.61], %)
|
|
||||||
|> line([-8.54, -2.51], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude001 = extrude(5, sketch001)
|
|
||||||
const sketch002 = startSketchOn(extrude001, seg01)
|
|
||||||
|> startProfileAt([-12.55, 2.89], %)
|
|
||||||
|> line([3.02, 1.9], %)
|
|
||||||
|> line([1.82, -1.49], %, $seg02)
|
|
||||||
|> angledLine([-86, segLen(seg02, %)], %)
|
|
||||||
|> line([-3.97, -0.53], %)
|
|
||||||
|> line([0.3, 0.84], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)`,
|
|
||||||
codeAfter: `const myVar = 5
|
|
||||||
const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
|
||||||
|> line([0.08, myVar], %)
|
|
||||||
|> line([13.03, 2.02], %, $seg01)
|
|
||||||
|> line([3.9, -7.6], %)
|
|
||||||
|> line([-11.18, -2.15], %)
|
|
||||||
|> line([5.41, -9.61], %)
|
|
||||||
|> line([-8.54, -2.51], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const sketch002 = startSketchOn({
|
|
||||||
plane: {
|
|
||||||
origin: { x: 1, y: 2, z: 3 },
|
|
||||||
x_axis: { x: 4, y: 5, z: 6 },
|
|
||||||
y_axis: { x: 7, y: 8, z: 9 },
|
|
||||||
z_axis: { x: 10, y: 11, z: 12 }
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|> startProfileAt([-12.55, 2.89], %)
|
|
||||||
|> line([3.02, 1.9], %)
|
|
||||||
|> line([1.82, -1.49], %, $seg02)
|
|
||||||
|> angledLine([-86, segLen(seg02, %)], %)
|
|
||||||
|> line([-3.97, -0.53], %)
|
|
||||||
|> line([0.3, 0.84], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
`,
|
|
||||||
lineOfInterest: 'line([-11.18, -2.15], %)',
|
|
||||||
type: 'extrude-wall',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
[
|
|
||||||
'delete extrude with sketch on it',
|
|
||||||
{
|
|
||||||
codeBefore: `const myVar = 5
|
|
||||||
const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
|
||||||
|> line([0.08, myVar], %)
|
|
||||||
|> line([13.03, 2.02], %, $seg01)
|
|
||||||
|> line([3.9, -7.6], %)
|
|
||||||
|> line([-11.18, -2.15], %)
|
|
||||||
|> line([5.41, -9.61], %)
|
|
||||||
|> line([-8.54, -2.51], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const extrude001 = extrude(5, sketch001)
|
|
||||||
const sketch002 = startSketchOn(extrude001, seg01)
|
|
||||||
|> startProfileAt([-12.55, 2.89], %)
|
|
||||||
|> line([3.02, 1.9], %)
|
|
||||||
|> line([1.82, -1.49], %, $seg02)
|
|
||||||
|> angledLine([-86, segLen(seg02, %)], %)
|
|
||||||
|> line([-3.97, -0.53], %)
|
|
||||||
|> line([0.3, 0.84], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)`,
|
|
||||||
codeAfter: `const myVar = 5
|
|
||||||
const sketch001 = startSketchOn('XZ')
|
|
||||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
|
||||||
|> line([0.08, myVar], %)
|
|
||||||
|> line([13.03, 2.02], %, $seg01)
|
|
||||||
|> line([3.9, -7.6], %)
|
|
||||||
|> line([-11.18, -2.15], %)
|
|
||||||
|> line([5.41, -9.61], %)
|
|
||||||
|> line([-8.54, -2.51], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
const sketch002 = startSketchOn({
|
|
||||||
plane: {
|
|
||||||
origin: { x: 1, y: 2, z: 3 },
|
|
||||||
x_axis: { x: 4, y: 5, z: 6 },
|
|
||||||
y_axis: { x: 7, y: 8, z: 9 },
|
|
||||||
z_axis: { x: 10, y: 11, z: 12 }
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|> startProfileAt([-12.55, 2.89], %)
|
|
||||||
|> line([3.02, 1.9], %)
|
|
||||||
|> line([1.82, -1.49], %, $seg02)
|
|
||||||
|> angledLine([-86, segLen(seg02, %)], %)
|
|
||||||
|> line([-3.97, -0.53], %)
|
|
||||||
|> line([0.3, 0.84], %)
|
|
||||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
|
||||||
|> close(%)
|
|
||||||
`,
|
|
||||||
lineOfInterest: 'startProfileAt([4.46, 5.12], %, $tag)',
|
|
||||||
type: 'end-cap',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
] as const
|
|
||||||
test.each(cases)(
|
|
||||||
'%s',
|
|
||||||
async (name, { codeBefore, codeAfter, lineOfInterest, type }) => {
|
|
||||||
// const lineOfInterest = 'line([-2.94, 2.7], %)'
|
|
||||||
const ast = parse(codeBefore)
|
|
||||||
if (err(ast)) throw ast
|
|
||||||
const programMemory = await enginelessExecutor(ast)
|
|
||||||
|
|
||||||
// deleteFromSelection
|
|
||||||
const range: [number, number] = [
|
|
||||||
codeBefore.indexOf(lineOfInterest),
|
|
||||||
codeBefore.indexOf(lineOfInterest) + lineOfInterest.length,
|
|
||||||
]
|
|
||||||
const newAst = await deleteFromSelection(
|
|
||||||
ast,
|
|
||||||
{
|
|
||||||
range,
|
|
||||||
type,
|
|
||||||
},
|
|
||||||
programMemory,
|
|
||||||
async () => {
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
|
||||||
return {
|
|
||||||
origin: { x: 1, y: 2, z: 3 },
|
|
||||||
x_axis: { x: 4, y: 5, z: 6 },
|
|
||||||
y_axis: { x: 7, y: 8, z: 9 },
|
|
||||||
z_axis: { x: 10, y: 11, z: 12 },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
if (err(newAst)) throw newAst
|
|
||||||
const newCode = recast(newAst)
|
|
||||||
expect(newCode).toBe(codeAfter)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
@ -17,7 +17,6 @@ import {
|
|||||||
PathToNode,
|
PathToNode,
|
||||||
ProgramMemory,
|
ProgramMemory,
|
||||||
SourceRange,
|
SourceRange,
|
||||||
SketchGroup,
|
|
||||||
} from './wasm'
|
} from './wasm'
|
||||||
import {
|
import {
|
||||||
isNodeSafeToReplacePath,
|
isNodeSafeToReplacePath,
|
||||||
@ -26,7 +25,6 @@ import {
|
|||||||
getNodeFromPath,
|
getNodeFromPath,
|
||||||
getNodePathFromSourceRange,
|
getNodePathFromSourceRange,
|
||||||
isNodeSafeToReplace,
|
isNodeSafeToReplace,
|
||||||
traverse,
|
|
||||||
} from './queryAst'
|
} from './queryAst'
|
||||||
import { addTagForSketchOnFace, getConstraintInfo } from './std/sketch'
|
import { addTagForSketchOnFace, getConstraintInfo } from './std/sketch'
|
||||||
import {
|
import {
|
||||||
@ -40,7 +38,6 @@ import { isOverlap, roundOff } from 'lib/utils'
|
|||||||
import { KCL_DEFAULT_CONSTANT_PREFIXES } from 'lib/constants'
|
import { KCL_DEFAULT_CONSTANT_PREFIXES } from 'lib/constants'
|
||||||
import { ConstrainInfo } from './std/stdTypes'
|
import { ConstrainInfo } from './std/stdTypes'
|
||||||
import { TagDeclarator } from 'wasm-lib/kcl/bindings/TagDeclarator'
|
import { TagDeclarator } from 'wasm-lib/kcl/bindings/TagDeclarator'
|
||||||
import { Models } from '@kittycad/lib'
|
|
||||||
|
|
||||||
export function startSketchOnDefault(
|
export function startSketchOnDefault(
|
||||||
node: Program,
|
node: Program,
|
||||||
@ -710,7 +707,7 @@ export function moveValueIntoNewVariablePath(
|
|||||||
programMemory,
|
programMemory,
|
||||||
pathToNode
|
pathToNode
|
||||||
)
|
)
|
||||||
let _node = ast
|
let _node = JSON.parse(JSON.stringify(ast))
|
||||||
const boop = replacer(_node, variableName)
|
const boop = replacer(_node, variableName)
|
||||||
if (trap(boop)) return { modifiedAst: ast }
|
if (trap(boop)) return { modifiedAst: ast }
|
||||||
|
|
||||||
@ -742,7 +739,7 @@ export function moveValueIntoNewVariable(
|
|||||||
programMemory,
|
programMemory,
|
||||||
sourceRange
|
sourceRange
|
||||||
)
|
)
|
||||||
let _node = ast
|
let _node = JSON.parse(JSON.stringify(ast))
|
||||||
const replaced = replacer(_node, variableName)
|
const replaced = replacer(_node, variableName)
|
||||||
if (trap(replaced)) return { modifiedAst: ast }
|
if (trap(replaced)) return { modifiedAst: ast }
|
||||||
|
|
||||||
@ -767,7 +764,7 @@ export function deleteSegmentFromPipeExpression(
|
|||||||
code: string,
|
code: string,
|
||||||
pathToNode: PathToNode
|
pathToNode: PathToNode
|
||||||
): Program | Error {
|
): Program | Error {
|
||||||
let _modifiedAst: Program = modifiedAst
|
let _modifiedAst: Program = JSON.parse(JSON.stringify(modifiedAst))
|
||||||
|
|
||||||
dependentRanges.forEach((range) => {
|
dependentRanges.forEach((range) => {
|
||||||
const path = getNodePathFromSourceRange(_modifiedAst, range)
|
const path = getNodePathFromSourceRange(_modifiedAst, range)
|
||||||
@ -876,175 +873,3 @@ export function removeSingleConstraintInfo(
|
|||||||
if (err(retval)) return false
|
if (err(retval)) return false
|
||||||
return retval
|
return retval
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function deleteFromSelection(
|
|
||||||
ast: Program,
|
|
||||||
selection: Selection,
|
|
||||||
programMemory: ProgramMemory,
|
|
||||||
getFaceDetails: (id: string) => Promise<Models['FaceIsPlanar_type']> = () =>
|
|
||||||
({} as any)
|
|
||||||
): Promise<Program | Error> {
|
|
||||||
const astClone = ast
|
|
||||||
const range = selection.range
|
|
||||||
const path = getNodePathFromSourceRange(ast, range)
|
|
||||||
const varDec = getNodeFromPath<VariableDeclarator>(
|
|
||||||
ast,
|
|
||||||
path,
|
|
||||||
'VariableDeclarator'
|
|
||||||
)
|
|
||||||
if (err(varDec)) return varDec
|
|
||||||
if (
|
|
||||||
(selection.type === 'extrude-wall' ||
|
|
||||||
selection.type === 'end-cap' ||
|
|
||||||
selection.type === 'start-cap') &&
|
|
||||||
varDec.node.init.type === 'PipeExpression'
|
|
||||||
) {
|
|
||||||
const varDecName = varDec.node.id.name
|
|
||||||
let pathToNode: PathToNode | null = null
|
|
||||||
let extrudeNameToDelete = ''
|
|
||||||
traverse(astClone, {
|
|
||||||
enter: (node, path) => {
|
|
||||||
if (node.type === 'VariableDeclaration') {
|
|
||||||
const dec = node.declarations[0]
|
|
||||||
if (
|
|
||||||
dec.init.type === 'CallExpression' &&
|
|
||||||
(dec.init.callee.name === 'extrude' ||
|
|
||||||
dec.init.callee.name === 'revolve') &&
|
|
||||||
dec.init.arguments?.[1].type === 'Identifier' &&
|
|
||||||
dec.init.arguments?.[1].name === varDecName
|
|
||||||
) {
|
|
||||||
pathToNode = path
|
|
||||||
extrudeNameToDelete = dec.id.name
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
})
|
|
||||||
if (!pathToNode) return new Error('Could not find extrude variable')
|
|
||||||
|
|
||||||
const expressionIndex = pathToNode[1][0] as number
|
|
||||||
astClone.body.splice(expressionIndex, 1)
|
|
||||||
if (extrudeNameToDelete) {
|
|
||||||
await new Promise(async (resolve) => {
|
|
||||||
let currentVariableName = ''
|
|
||||||
const pathsDependingOnExtrude: Array<{
|
|
||||||
path: PathToNode
|
|
||||||
sketchName: string
|
|
||||||
}> = []
|
|
||||||
traverse(astClone, {
|
|
||||||
leave: (node) => {
|
|
||||||
if (node.type === 'VariableDeclaration') {
|
|
||||||
currentVariableName = ''
|
|
||||||
}
|
|
||||||
},
|
|
||||||
enter: async (node, path) => {
|
|
||||||
if (node.type === 'VariableDeclaration') {
|
|
||||||
currentVariableName = node.declarations[0].id.name
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
// match startSketchOn(${extrudeNameToDelete})
|
|
||||||
node.type === 'CallExpression' &&
|
|
||||||
node.callee.name === 'startSketchOn' &&
|
|
||||||
node.arguments[0].type === 'Identifier' &&
|
|
||||||
node.arguments[0].name === extrudeNameToDelete
|
|
||||||
) {
|
|
||||||
pathsDependingOnExtrude.push({
|
|
||||||
path,
|
|
||||||
sketchName: currentVariableName,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
},
|
|
||||||
})
|
|
||||||
const roundLiteral = (x: number) => createLiteral(roundOff(x))
|
|
||||||
const modificationDetails: {
|
|
||||||
parent: PipeExpression['body']
|
|
||||||
faceDetails: Models['FaceIsPlanar_type']
|
|
||||||
lastKey: number
|
|
||||||
}[] = []
|
|
||||||
for (const { path, sketchName } of pathsDependingOnExtrude) {
|
|
||||||
const parent = getNodeFromPath<PipeExpression['body']>(
|
|
||||||
astClone,
|
|
||||||
path.slice(0, -1)
|
|
||||||
)
|
|
||||||
if (err(parent)) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const sketchToPreserve = programMemory.root[sketchName] as SketchGroup
|
|
||||||
console.log('sketchName', sketchName)
|
|
||||||
// Can't kick off multiple requests at once as getFaceDetails
|
|
||||||
// is three engine calls in one and they conflict
|
|
||||||
const faceDetails = await getFaceDetails(sketchToPreserve.on.id)
|
|
||||||
if (
|
|
||||||
!(
|
|
||||||
faceDetails.origin &&
|
|
||||||
faceDetails.x_axis &&
|
|
||||||
faceDetails.y_axis &&
|
|
||||||
faceDetails.z_axis
|
|
||||||
)
|
|
||||||
) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const lastKey = Number(path.slice(-1)[0][0])
|
|
||||||
modificationDetails.push({
|
|
||||||
parent: parent.node,
|
|
||||||
faceDetails,
|
|
||||||
lastKey,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
for (const { parent, faceDetails, lastKey } of modificationDetails) {
|
|
||||||
if (
|
|
||||||
!(
|
|
||||||
faceDetails.origin &&
|
|
||||||
faceDetails.x_axis &&
|
|
||||||
faceDetails.y_axis &&
|
|
||||||
faceDetails.z_axis
|
|
||||||
)
|
|
||||||
) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
parent[lastKey] = createCallExpressionStdLib('startSketchOn', [
|
|
||||||
createObjectExpression({
|
|
||||||
plane: createObjectExpression({
|
|
||||||
origin: createObjectExpression({
|
|
||||||
x: roundLiteral(faceDetails.origin.x),
|
|
||||||
y: roundLiteral(faceDetails.origin.y),
|
|
||||||
z: roundLiteral(faceDetails.origin.z),
|
|
||||||
}),
|
|
||||||
x_axis: createObjectExpression({
|
|
||||||
x: roundLiteral(faceDetails.x_axis.x),
|
|
||||||
y: roundLiteral(faceDetails.x_axis.y),
|
|
||||||
z: roundLiteral(faceDetails.x_axis.z),
|
|
||||||
}),
|
|
||||||
y_axis: createObjectExpression({
|
|
||||||
x: roundLiteral(faceDetails.y_axis.x),
|
|
||||||
y: roundLiteral(faceDetails.y_axis.y),
|
|
||||||
z: roundLiteral(faceDetails.y_axis.z),
|
|
||||||
}),
|
|
||||||
z_axis: createObjectExpression({
|
|
||||||
x: roundLiteral(faceDetails.z_axis.x),
|
|
||||||
y: roundLiteral(faceDetails.z_axis.y),
|
|
||||||
z: roundLiteral(faceDetails.z_axis.z),
|
|
||||||
}),
|
|
||||||
}),
|
|
||||||
}),
|
|
||||||
])
|
|
||||||
}
|
|
||||||
resolve(true)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// await prom
|
|
||||||
return astClone
|
|
||||||
} else if (varDec.node.init.type === 'PipeExpression') {
|
|
||||||
const pipeBody = varDec.node.init.body
|
|
||||||
if (
|
|
||||||
pipeBody[0].type === 'CallExpression' &&
|
|
||||||
pipeBody[0].callee.name === 'startSketchOn'
|
|
||||||
) {
|
|
||||||
// remove varDec
|
|
||||||
const varDecIndex = varDec.shallowPath[1][0] as number
|
|
||||||
astClone.body.splice(varDecIndex, 1)
|
|
||||||
return astClone
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return new Error('Selection not recognised, could not delete')
|
|
||||||
}
|
|
||||||
|
@ -19,7 +19,6 @@ import {
|
|||||||
createPipeSubstitution,
|
createPipeSubstitution,
|
||||||
} from './modifyAst'
|
} from './modifyAst'
|
||||||
import { err } from 'lib/trap'
|
import { err } from 'lib/trap'
|
||||||
import { warn } from 'node:console'
|
|
||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
await initPromise
|
await initPromise
|
||||||
@ -87,7 +86,10 @@ const yo2 = hmm([identifierGuy + 5])`
|
|||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
expect(result.value?.type).toBe('BinaryExpression')
|
expect(result.value?.type).toBe('BinaryExpression')
|
||||||
expect(code.slice(result.value.start, result.value.end)).toBe('100 + 100')
|
expect(code.slice(result.value.start, result.value.end)).toBe('100 + 100')
|
||||||
const replaced = result.replacer(ast, 'replaceName')
|
const replaced = result.replacer(
|
||||||
|
JSON.parse(JSON.stringify(ast)),
|
||||||
|
'replaceName'
|
||||||
|
)
|
||||||
if (err(replaced)) throw replaced
|
if (err(replaced)) throw replaced
|
||||||
const outCode = recast(replaced.modifiedAst)
|
const outCode = recast(replaced.modifiedAst)
|
||||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||||
@ -111,7 +113,10 @@ const yo2 = hmm([identifierGuy + 5])`
|
|||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
expect(result.value?.type).toBe('CallExpression')
|
expect(result.value?.type).toBe('CallExpression')
|
||||||
expect(code.slice(result.value.start, result.value.end)).toBe("def('yo')")
|
expect(code.slice(result.value.start, result.value.end)).toBe("def('yo')")
|
||||||
const replaced = result.replacer(ast, 'replaceName')
|
const replaced = result.replacer(
|
||||||
|
JSON.parse(JSON.stringify(ast)),
|
||||||
|
'replaceName'
|
||||||
|
)
|
||||||
if (err(replaced)) throw replaced
|
if (err(replaced)) throw replaced
|
||||||
const outCode = recast(replaced.modifiedAst)
|
const outCode = recast(replaced.modifiedAst)
|
||||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||||
@ -148,7 +153,10 @@ const yo2 = hmm([identifierGuy + 5])`
|
|||||||
expect(result.isSafe).toBe(true)
|
expect(result.isSafe).toBe(true)
|
||||||
expect(result.value?.type).toBe('BinaryExpression')
|
expect(result.value?.type).toBe('BinaryExpression')
|
||||||
expect(code.slice(result.value.start, result.value.end)).toBe('5 + 6')
|
expect(code.slice(result.value.start, result.value.end)).toBe('5 + 6')
|
||||||
const replaced = result.replacer(ast, 'replaceName')
|
const replaced = result.replacer(
|
||||||
|
JSON.parse(JSON.stringify(ast)),
|
||||||
|
'replaceName'
|
||||||
|
)
|
||||||
if (err(replaced)) throw replaced
|
if (err(replaced)) throw replaced
|
||||||
const outCode = recast(replaced.modifiedAst)
|
const outCode = recast(replaced.modifiedAst)
|
||||||
expect(outCode).toContain(`const yo = replaceName`)
|
expect(outCode).toContain(`const yo = replaceName`)
|
||||||
@ -164,7 +172,10 @@ const yo2 = hmm([identifierGuy + 5])`
|
|||||||
expect(code.slice(result.value.start, result.value.end)).toBe(
|
expect(code.slice(result.value.start, result.value.end)).toBe(
|
||||||
"jkl('yo') + 2"
|
"jkl('yo') + 2"
|
||||||
)
|
)
|
||||||
const replaced = result.replacer(ast, 'replaceName')
|
const replaced = result.replacer(
|
||||||
|
JSON.parse(JSON.stringify(ast)),
|
||||||
|
'replaceName'
|
||||||
|
)
|
||||||
if (err(replaced)) throw replaced
|
if (err(replaced)) throw replaced
|
||||||
const { modifiedAst } = replaced
|
const { modifiedAst } = replaced
|
||||||
const outCode = recast(modifiedAst)
|
const outCode = recast(modifiedAst)
|
||||||
@ -183,7 +194,10 @@ const yo2 = hmm([identifierGuy + 5])`
|
|||||||
expect(code.slice(result.value.start, result.value.end)).toBe(
|
expect(code.slice(result.value.start, result.value.end)).toBe(
|
||||||
'identifierGuy + 5'
|
'identifierGuy + 5'
|
||||||
)
|
)
|
||||||
const replaced = result.replacer(ast, 'replaceName')
|
const replaced = result.replacer(
|
||||||
|
JSON.parse(JSON.stringify(ast)),
|
||||||
|
'replaceName'
|
||||||
|
)
|
||||||
if (err(replaced)) throw replaced
|
if (err(replaced)) throw replaced
|
||||||
const { modifiedAst } = replaced
|
const { modifiedAst } = replaced
|
||||||
const outCode = recast(modifiedAst)
|
const outCode = recast(modifiedAst)
|
||||||
|
@ -520,8 +520,8 @@ export function isNodeSafeToReplacePath(
|
|||||||
const replaceNodeWithIdentifier: ReplacerFn = (_ast, varName) => {
|
const replaceNodeWithIdentifier: ReplacerFn = (_ast, varName) => {
|
||||||
const identifier = createIdentifier(varName)
|
const identifier = createIdentifier(varName)
|
||||||
const last = finPath[finPath.length - 1]
|
const last = finPath[finPath.length - 1]
|
||||||
const pathToReplaced = finPath
|
const pathToReplaced = JSON.parse(JSON.stringify(finPath))
|
||||||
pathToReplaced[1][0] = (pathToReplaced[1][0] as number) + 1
|
pathToReplaced[1][0] = pathToReplaced[1][0] + 1
|
||||||
const startPath = finPath.slice(0, -1)
|
const startPath = finPath.slice(0, -1)
|
||||||
const _nodeToReplace = getNodeFromPath(_ast, startPath)
|
const _nodeToReplace = getNodeFromPath(_ast, startPath)
|
||||||
if (err(_nodeToReplace)) return _nodeToReplace
|
if (err(_nodeToReplace)) return _nodeToReplace
|
||||||
|
@ -1496,7 +1496,7 @@ export function transformSecondarySketchLinesTagFirst({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
| Error {
|
| Error {
|
||||||
// let node = ast
|
// let node = JSON.parse(JSON.stringify(ast))
|
||||||
const primarySelection = selectionRanges.codeBasedSelections[0].range
|
const primarySelection = selectionRanges.codeBasedSelections[0].range
|
||||||
|
|
||||||
const _tag = giveSketchFnCallTag(ast, primarySelection, forceSegName)
|
const _tag = giveSketchFnCallTag(ast, primarySelection, forceSegName)
|
||||||
@ -1565,7 +1565,7 @@ export function transformAstSketchLines({
|
|||||||
}
|
}
|
||||||
| Error {
|
| Error {
|
||||||
// deep clone since we are mutating in a loop, of which any could fail
|
// deep clone since we are mutating in a loop, of which any could fail
|
||||||
let node = ast
|
let node = JSON.parse(JSON.stringify(ast))
|
||||||
let _valueUsedInTransform // TODO should this be an array?
|
let _valueUsedInTransform // TODO should this be an array?
|
||||||
const pathToNodeMap: PathToNodeMap = {}
|
const pathToNodeMap: PathToNodeMap = {}
|
||||||
|
|
||||||
|
@ -33,7 +33,7 @@ export function updatePathToNodeFromMap(
|
|||||||
oldPath: PathToNode,
|
oldPath: PathToNode,
|
||||||
pathToNodeMap: { [key: number]: PathToNode }
|
pathToNodeMap: { [key: number]: PathToNode }
|
||||||
): PathToNode {
|
): PathToNode {
|
||||||
const updatedPathToNode = oldPath
|
const updatedPathToNode = JSON.parse(JSON.stringify(oldPath))
|
||||||
let max = 0
|
let max = 0
|
||||||
Object.values(pathToNodeMap).forEach((path) => {
|
Object.values(pathToNodeMap).forEach((path) => {
|
||||||
const index = Number(path[1][0])
|
const index = Number(path[1][0])
|
||||||
|
@ -334,7 +334,6 @@ export async function coreDump(
|
|||||||
openGithubIssue: boolean = false
|
openGithubIssue: boolean = false
|
||||||
): Promise<CoreDumpInfo> {
|
): Promise<CoreDumpInfo> {
|
||||||
try {
|
try {
|
||||||
console.warn('CoreDump: Initializing core dump')
|
|
||||||
const dump: CoreDumpInfo = await coredump(coreDumpManager)
|
const dump: CoreDumpInfo = await coredump(coreDumpManager)
|
||||||
/* NOTE: this console output of the coredump should include the field
|
/* NOTE: this console output of the coredump should include the field
|
||||||
`github_issue_url` which is not in the uploaded coredump file.
|
`github_issue_url` which is not in the uploaded coredump file.
|
||||||
|
@ -13,14 +13,6 @@ import screenshot from 'lib/screenshot'
|
|||||||
import React from 'react'
|
import React from 'react'
|
||||||
import { VITE_KC_API_BASE_URL } from 'env'
|
import { VITE_KC_API_BASE_URL } from 'env'
|
||||||
|
|
||||||
/* eslint-disable suggest-no-throw/suggest-no-throw --
|
|
||||||
* All the throws in CoreDumpManager are intentional and should be caught and handled properly
|
|
||||||
* by the calling Promises with a catch block. The throws are essential to properly handling
|
|
||||||
* when the app isn't ready enough or otherwise unable to produce a core dump. By throwing
|
|
||||||
* instead of simply erroring, the code halts execution at the first point which it cannot
|
|
||||||
* complete the core dump request.
|
|
||||||
**/
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* CoreDumpManager module
|
* CoreDumpManager module
|
||||||
* - for getting all the values from the JS world to pass to the Rust world for a core dump.
|
* - for getting all the values from the JS world to pass to the Rust world for a core dump.
|
||||||
@ -30,7 +22,6 @@ import { VITE_KC_API_BASE_URL } from 'env'
|
|||||||
// CoreDumpManager is instantiated in ModelingMachineProvider and passed to coreDump() in wasm.ts
|
// CoreDumpManager is instantiated in ModelingMachineProvider and passed to coreDump() in wasm.ts
|
||||||
// The async function coreDump() handles any errors thrown in its Promise catch method and rethrows
|
// The async function coreDump() handles any errors thrown in its Promise catch method and rethrows
|
||||||
// them to so the toast handler in ModelingMachineProvider can show the user an error message toast
|
// them to so the toast handler in ModelingMachineProvider can show the user an error message toast
|
||||||
// TODO: Throw more
|
|
||||||
export class CoreDumpManager {
|
export class CoreDumpManager {
|
||||||
engineCommandManager: EngineCommandManager
|
engineCommandManager: EngineCommandManager
|
||||||
htmlRef: React.RefObject<HTMLDivElement> | null
|
htmlRef: React.RefObject<HTMLDivElement> | null
|
||||||
|
@ -9,12 +9,12 @@ const wallMountL = 6 // the length of the bracket
|
|||||||
const sigmaAllow = 35000 // psi
|
const sigmaAllow = 35000 // psi
|
||||||
const width = 6 // inch
|
const width = 6 // inch
|
||||||
const p = 300 // Force on shelf - lbs
|
const p = 300 // Force on shelf - lbs
|
||||||
const shelfLength = 12 // inches
|
const L = 12 // inches
|
||||||
const moment = shelfLength * p / 2 // Moment experienced at fixed end of bracket
|
const M = L * p / 2 // Moment experienced at fixed end of bracket
|
||||||
const factorOfSafety = 2 // Factor of safety of 2 to be conservative
|
const FOS = 2 // Factor of safety of 2 to be conservative
|
||||||
|
|
||||||
// Calculate the thickness off the bending stress and factor of safety
|
// Calculate the thickness off the bending stress and factor of safety
|
||||||
const thickness = sqrt(6 * moment * factorOfSafety / (width * sigmaAllow))
|
const thickness = sqrt(6 * M * FOS / (width * sigmaAllow))
|
||||||
|
|
||||||
// 0.25 inch fillet radius
|
// 0.25 inch fillet radius
|
||||||
const filletR = 0.25
|
const filletR = 0.25
|
||||||
|
@ -29,10 +29,7 @@ export function cleanErrs<T>(
|
|||||||
return [argsWOutErr.length !== value.length, argsWOutErr, argsWErr]
|
return [argsWOutErr.length !== value.length, argsWOutErr, argsWErr]
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
// Used to report errors to user at a certain point in execution
|
||||||
* Used to report errors to user at a certain point in execution
|
|
||||||
* @returns boolean
|
|
||||||
*/
|
|
||||||
export function trap<T>(
|
export function trap<T>(
|
||||||
value: ExcludeErr<T> | Error,
|
value: ExcludeErr<T> | Error,
|
||||||
opts?: {
|
opts?: {
|
||||||
@ -46,8 +43,6 @@ export function trap<T>(
|
|||||||
|
|
||||||
console.error(value)
|
console.error(value)
|
||||||
opts?.suppress ||
|
opts?.suppress ||
|
||||||
toast.error((opts?.altErr ?? value ?? new Error('Unknown')).toString(), {
|
toast.error((opts?.altErr ?? value ?? new Error('Unknown')).toString())
|
||||||
id: 'error',
|
|
||||||
})
|
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
@ -96,7 +96,9 @@ export function useCalculateKclExpression({
|
|||||||
ast,
|
ast,
|
||||||
engineCommandManager,
|
engineCommandManager,
|
||||||
useFakeExecutor: true,
|
useFakeExecutor: true,
|
||||||
programMemoryOverride: kclManager.programMemory,
|
programMemoryOverride: JSON.parse(
|
||||||
|
JSON.stringify(kclManager.programMemory)
|
||||||
|
),
|
||||||
})
|
})
|
||||||
const resultDeclaration = ast.body.find(
|
const resultDeclaration = ast.body.find(
|
||||||
(a) =>
|
(a) =>
|
||||||
|
@ -26,11 +26,7 @@ import {
|
|||||||
applyConstraintEqualLength,
|
applyConstraintEqualLength,
|
||||||
setEqualLengthInfo,
|
setEqualLengthInfo,
|
||||||
} from 'components/Toolbar/EqualLength'
|
} from 'components/Toolbar/EqualLength'
|
||||||
import {
|
import { addStartProfileAt, extrudeSketch } from 'lang/modifyAst'
|
||||||
addStartProfileAt,
|
|
||||||
deleteFromSelection,
|
|
||||||
extrudeSketch,
|
|
||||||
} from 'lang/modifyAst'
|
|
||||||
import { getNodeFromPath } from '../lang/queryAst'
|
import { getNodeFromPath } from '../lang/queryAst'
|
||||||
import {
|
import {
|
||||||
applyConstraintEqualAngle,
|
applyConstraintEqualAngle,
|
||||||
@ -48,14 +44,12 @@ import {
|
|||||||
import { Models } from '@kittycad/lib/dist/types/src'
|
import { Models } from '@kittycad/lib/dist/types/src'
|
||||||
import { ModelingCommandSchema } from 'lib/commandBarConfigs/modelingCommandConfig'
|
import { ModelingCommandSchema } from 'lib/commandBarConfigs/modelingCommandConfig'
|
||||||
import { err, trap } from 'lib/trap'
|
import { err, trap } from 'lib/trap'
|
||||||
import { DefaultPlaneStr, getFaceDetails } from 'clientSideScene/sceneEntities'
|
import { DefaultPlaneStr } from 'clientSideScene/sceneEntities'
|
||||||
import { Vector3 } from 'three'
|
import { Vector3 } from 'three'
|
||||||
import { quaternionFromUpNForward } from 'clientSideScene/helpers'
|
import { quaternionFromUpNForward } from 'clientSideScene/helpers'
|
||||||
import { uuidv4 } from 'lib/utils'
|
import { uuidv4 } from 'lib/utils'
|
||||||
import { Coords2d } from 'lang/std/sketch'
|
import { Coords2d } from 'lang/std/sketch'
|
||||||
import { deleteSegment } from 'clientSideScene/ClientSideSceneComp'
|
import { deleteSegment } from 'clientSideScene/ClientSideSceneComp'
|
||||||
import { executeAst } from 'useStore'
|
|
||||||
import toast from 'react-hot-toast'
|
|
||||||
|
|
||||||
export const MODELING_PERSIST_KEY = 'MODELING_PERSIST_KEY'
|
export const MODELING_PERSIST_KEY = 'MODELING_PERSIST_KEY'
|
||||||
|
|
||||||
@ -163,9 +157,6 @@ export type ModelingMachineEvent =
|
|||||||
type: 'Set selection'
|
type: 'Set selection'
|
||||||
data: SetSelections
|
data: SetSelections
|
||||||
}
|
}
|
||||||
| {
|
|
||||||
type: 'Delete selection'
|
|
||||||
}
|
|
||||||
| { type: 'Sketch no face' }
|
| { type: 'Sketch no face' }
|
||||||
| { type: 'Toggle gui mode' }
|
| { type: 'Toggle gui mode' }
|
||||||
| { type: 'Cancel' }
|
| { type: 'Cancel' }
|
||||||
@ -282,13 +273,6 @@ export const modelingMachine = createMachine(
|
|||||||
cond: 'Has exportable geometry',
|
cond: 'Has exportable geometry',
|
||||||
actions: 'Engine export',
|
actions: 'Engine export',
|
||||||
},
|
},
|
||||||
|
|
||||||
'Delete selection': {
|
|
||||||
target: 'idle',
|
|
||||||
cond: 'has valid selection for deletion',
|
|
||||||
actions: ['AST delete selection'],
|
|
||||||
internal: true,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
|
|
||||||
entry: 'reset client scene mouse handlers',
|
entry: 'reset client scene mouse handlers',
|
||||||
@ -979,42 +963,6 @@ export const modelingMachine = createMachine(
|
|||||||
editorManager.selectRange(updatedAst?.selections)
|
editorManager.selectRange(updatedAst?.selections)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
'AST delete selection': async ({ sketchDetails, selectionRanges }) => {
|
|
||||||
let ast = kclManager.ast
|
|
||||||
|
|
||||||
const getScaledFaceDetails = async (entityId: string) => {
|
|
||||||
const faceDetails = await getFaceDetails(entityId)
|
|
||||||
if (err(faceDetails)) return {}
|
|
||||||
return {
|
|
||||||
...faceDetails,
|
|
||||||
origin: {
|
|
||||||
x: faceDetails.origin.x / sceneInfra._baseUnitMultiplier,
|
|
||||||
y: faceDetails.origin.y / sceneInfra._baseUnitMultiplier,
|
|
||||||
z: faceDetails.origin.z / sceneInfra._baseUnitMultiplier,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const modifiedAst = await deleteFromSelection(
|
|
||||||
ast,
|
|
||||||
selectionRanges.codeBasedSelections[0],
|
|
||||||
kclManager.programMemory,
|
|
||||||
getScaledFaceDetails
|
|
||||||
)
|
|
||||||
if (err(modifiedAst)) return
|
|
||||||
|
|
||||||
const testExecute = await executeAst({
|
|
||||||
ast: modifiedAst,
|
|
||||||
useFakeExecutor: true,
|
|
||||||
engineCommandManager,
|
|
||||||
})
|
|
||||||
if (testExecute.errors.length) {
|
|
||||||
toast.error('Unable to delete part')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
await kclManager.updateAst(modifiedAst, true)
|
|
||||||
},
|
|
||||||
'conditionally equip line tool': (_, { type }) => {
|
'conditionally equip line tool': (_, { type }) => {
|
||||||
if (type === 'done.invoke.animate-to-face') {
|
if (type === 'done.invoke.animate-to-face') {
|
||||||
sceneInfra.modelingSend('Equip Line tool')
|
sceneInfra.modelingSend('Equip Line tool')
|
||||||
|
14
src/wasm-lib/Cargo.lock
generated
14
src/wasm-lib/Cargo.lock
generated
@ -533,7 +533,6 @@ dependencies = [
|
|||||||
"ciborium",
|
"ciborium",
|
||||||
"clap",
|
"clap",
|
||||||
"criterion-plot",
|
"criterion-plot",
|
||||||
"futures",
|
|
||||||
"is-terminal",
|
"is-terminal",
|
||||||
"itertools 0.10.5",
|
"itertools 0.10.5",
|
||||||
"num-traits",
|
"num-traits",
|
||||||
@ -546,7 +545,6 @@ dependencies = [
|
|||||||
"serde_derive",
|
"serde_derive",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"tinytemplate",
|
"tinytemplate",
|
||||||
"tokio",
|
|
||||||
"walkdir",
|
"walkdir",
|
||||||
]
|
]
|
||||||
|
|
||||||
@ -712,7 +710,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "derive-docs"
|
name = "derive-docs"
|
||||||
version = "0.1.19"
|
version = "0.1.18"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"Inflector",
|
"Inflector",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
@ -1385,7 +1383,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kcl-lib"
|
name = "kcl-lib"
|
||||||
version = "0.1.68"
|
version = "0.1.67"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"approx",
|
"approx",
|
||||||
@ -3277,9 +3275,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ts-rs"
|
name = "ts-rs"
|
||||||
version = "9.0.1"
|
version = "9.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b44017f9f875786e543595076374b9ef7d13465a518dd93d6ccdbf5b432dde8c"
|
checksum = "5e2dcf58e612adda9a83800731e8e4aba04d8a302b9029617b0b6e4b021d5357"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"chrono",
|
"chrono",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
@ -3291,9 +3289,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ts-rs-macros"
|
name = "ts-rs-macros"
|
||||||
version = "9.0.1"
|
version = "9.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c88cc88fd23b5a04528f3a8436024f20010a16ec18eb23c164b1242f65860130"
|
checksum = "cbdee324e50a7402416d9c25270d3df4241ed528af5d36dda18b6f219551c577"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "derive-docs"
|
name = "derive-docs"
|
||||||
description = "A tool for generating documentation from Rust derive macros"
|
description = "A tool for generating documentation from Rust derive macros"
|
||||||
version = "0.1.19"
|
version = "0.1.18"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/KittyCAD/modeling-app"
|
repository = "https://github.com/KittyCAD/modeling-app"
|
||||||
|
@ -761,7 +761,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
|
|||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -795,7 +795,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
|
|||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default()).await.unwrap();
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default()).await.unwrap();
|
||||||
|
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
|
|
||||||
// Zoom to fit.
|
// Zoom to fit.
|
||||||
ctx.engine
|
ctx.engine
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_someFn {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_someFn {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_someFn {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_someFn {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
@ -106,7 +106,7 @@ mod test_examples_show {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -135,7 +135,7 @@ mod test_examples_show {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -17,7 +17,7 @@ mod test_examples_my_func {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -47,7 +47,7 @@ mod test_examples_my_func {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
@ -108,7 +108,7 @@ mod test_examples_my_func {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -137,7 +137,7 @@ mod test_examples_my_func {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -17,7 +17,7 @@ mod test_examples_line_to {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -47,7 +47,7 @@ mod test_examples_line_to {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
@ -108,7 +108,7 @@ mod test_examples_line_to {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -137,7 +137,7 @@ mod test_examples_line_to {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_min {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_min {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
@ -106,7 +106,7 @@ mod test_examples_min {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -135,7 +135,7 @@ mod test_examples_min {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_import {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_import {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_import {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_import {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_import {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_import {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
|||||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
ctx.run(&program, None).await.unwrap();
|
ctx.run(program, None).await.unwrap();
|
||||||
ctx.engine
|
ctx.engine
|
||||||
.send_modeling_cmd(
|
.send_modeling_cmd(
|
||||||
uuid::Uuid::new_v4(),
|
uuid::Uuid::new_v4(),
|
||||||
|
@ -157,7 +157,7 @@ async fn snapshot_endpoint(body: Bytes, state: ExecutorContext) -> Response<Body
|
|||||||
// Let users know if the test is taking a long time.
|
// Let users know if the test is taking a long time.
|
||||||
let (done_tx, done_rx) = oneshot::channel::<()>();
|
let (done_tx, done_rx) = oneshot::channel::<()>();
|
||||||
let timer = time_until(done_rx);
|
let timer = time_until(done_rx);
|
||||||
let snapshot = match state.execute_and_prepare_snapshot(&program).await {
|
let snapshot = match state.execute_and_prepare_snapshot(program).await {
|
||||||
Ok(sn) => sn,
|
Ok(sn) => sn,
|
||||||
Err(e) => return kcl_err(e),
|
Err(e) => return kcl_err(e),
|
||||||
};
|
};
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "kcl-lib"
|
name = "kcl-lib"
|
||||||
description = "KittyCAD Language implementation and tools"
|
description = "KittyCAD Language implementation and tools"
|
||||||
version = "0.1.68"
|
version = "0.1.67"
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/KittyCAD/modeling-app"
|
repository = "https://github.com/KittyCAD/modeling-app"
|
||||||
@ -19,7 +19,7 @@ chrono = "0.4.38"
|
|||||||
clap = { version = "4.5.7", default-features = false, optional = true }
|
clap = { version = "4.5.7", default-features = false, optional = true }
|
||||||
dashmap = "6.0.1"
|
dashmap = "6.0.1"
|
||||||
databake = { version = "0.1.8", features = ["derive"] }
|
databake = { version = "0.1.8", features = ["derive"] }
|
||||||
derive-docs = { version = "0.1.19", path = "../derive-docs" }
|
derive-docs = { version = "0.1.18", path = "../derive-docs" }
|
||||||
form_urlencoded = "1.2.1"
|
form_urlencoded = "1.2.1"
|
||||||
futures = { version = "0.3.30" }
|
futures = { version = "0.3.30" }
|
||||||
git_rev = "0.1.0"
|
git_rev = "0.1.0"
|
||||||
@ -28,7 +28,7 @@ kittycad = { workspace = true, features = ["clap"] }
|
|||||||
lazy_static = "1.5.0"
|
lazy_static = "1.5.0"
|
||||||
mime_guess = "2.0.4"
|
mime_guess = "2.0.4"
|
||||||
parse-display = "0.9.1"
|
parse-display = "0.9.1"
|
||||||
pyo3 = { version = "0.22.0", optional = true }
|
pyo3 = {version = "0.22.0", optional = true}
|
||||||
reqwest = { version = "0.11.26", default-features = false, features = ["stream", "rustls-tls"] }
|
reqwest = { version = "0.11.26", default-features = false, features = ["stream", "rustls-tls"] }
|
||||||
ropey = "1.6.1"
|
ropey = "1.6.1"
|
||||||
schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1"] }
|
schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1"] }
|
||||||
@ -37,7 +37,7 @@ serde_json = "1.0.118"
|
|||||||
sha2 = "0.10.8"
|
sha2 = "0.10.8"
|
||||||
thiserror = "1.0.61"
|
thiserror = "1.0.61"
|
||||||
toml = "0.8.14"
|
toml = "0.8.14"
|
||||||
ts-rs = { version = "9.0.1", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
|
ts-rs = { version = "9.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
|
||||||
url = { version = "2.5.2", features = ["serde"] }
|
url = { version = "2.5.2", features = ["serde"] }
|
||||||
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
|
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
|
||||||
validator = { version = "0.18.1", features = ["derive"] }
|
validator = { version = "0.18.1", features = ["derive"] }
|
||||||
@ -67,8 +67,6 @@ cli = ["dep:clap"]
|
|||||||
disable-println = []
|
disable-println = []
|
||||||
engine = []
|
engine = []
|
||||||
pyo3 = ["dep:pyo3"]
|
pyo3 = ["dep:pyo3"]
|
||||||
# Helper functions also used in benchmarks.
|
|
||||||
lsp-test-util = []
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
panic = "abort"
|
panic = "abort"
|
||||||
@ -80,10 +78,10 @@ debug = true # Flamegraphs of benchmarks require accurate debug symbols
|
|||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
base64 = "0.22.1"
|
base64 = "0.22.1"
|
||||||
convert_case = "0.6.0"
|
convert_case = "0.6.0"
|
||||||
criterion = { version = "0.5.1", features = ["async_tokio"] }
|
criterion = "0.5.1"
|
||||||
expectorate = "1.1.0"
|
expectorate = "1.1.0"
|
||||||
iai = "0.1"
|
iai = "0.1"
|
||||||
image = { version = "0.25.1", default-features = false, features = ["png"] }
|
image = {version = "0.25.1", default-features = false, features = ["png"] }
|
||||||
insta = { version = "1.38.0", features = ["json"] }
|
insta = { version = "1.38.0", features = ["json"] }
|
||||||
itertools = "0.13.0"
|
itertools = "0.13.0"
|
||||||
pretty_assertions = "1.4.0"
|
pretty_assertions = "1.4.0"
|
||||||
@ -97,13 +95,3 @@ harness = false
|
|||||||
[[bench]]
|
[[bench]]
|
||||||
name = "compiler_benchmark_iai"
|
name = "compiler_benchmark_iai"
|
||||||
harness = false
|
harness = false
|
||||||
|
|
||||||
[[bench]]
|
|
||||||
name = "lsp_semantic_tokens_benchmark_criterion"
|
|
||||||
harness = false
|
|
||||||
required-features = ["lsp-test-util"]
|
|
||||||
|
|
||||||
[[bench]]
|
|
||||||
name = "lsp_semantic_tokens_benchmark_iai"
|
|
||||||
harness = false
|
|
||||||
required-features = ["lsp-test-util"]
|
|
||||||
|
@ -1,65 +0,0 @@
|
|||||||
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
|
|
||||||
use kcl_lib::lsp::test_util::kcl_lsp_server;
|
|
||||||
use tokio::runtime::Runtime;
|
|
||||||
use tower_lsp::LanguageServer;
|
|
||||||
|
|
||||||
async fn kcl_lsp_semantic_tokens(code: &str) {
|
|
||||||
let server = kcl_lsp_server(false).await.unwrap();
|
|
||||||
|
|
||||||
// Send open file.
|
|
||||||
server
|
|
||||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
|
||||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
|
||||||
uri: "file:///test.kcl".try_into().unwrap(),
|
|
||||||
language_id: "kcl".to_string(),
|
|
||||||
version: 1,
|
|
||||||
text: code.to_string(),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
|
|
||||||
// Send semantic tokens request.
|
|
||||||
black_box(
|
|
||||||
server
|
|
||||||
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
|
|
||||||
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
|
|
||||||
uri: "file:///test.kcl".try_into().unwrap(),
|
|
||||||
},
|
|
||||||
partial_result_params: Default::default(),
|
|
||||||
work_done_progress_params: Default::default(),
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bench_kcl_lsp_semantic_tokens(c: &mut Criterion) {
|
|
||||||
for (name, code) in [
|
|
||||||
("pipes_on_pipes", PIPES_PROGRAM),
|
|
||||||
("big_kitt", KITT_PROGRAM),
|
|
||||||
("cube", CUBE_PROGRAM),
|
|
||||||
("math", MATH_PROGRAM),
|
|
||||||
("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
|
|
||||||
("global_tags", GLOBAL_TAGS_FILE),
|
|
||||||
] {
|
|
||||||
c.bench_with_input(BenchmarkId::new("semantic_tokens_", name), &code, |b, &s| {
|
|
||||||
let rt = Runtime::new().unwrap();
|
|
||||||
|
|
||||||
// Spawn a future onto the runtime
|
|
||||||
b.iter(|| {
|
|
||||||
rt.block_on(kcl_lsp_semantic_tokens(s));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
criterion_group!(benches, bench_kcl_lsp_semantic_tokens);
|
|
||||||
criterion_main!(benches);
|
|
||||||
|
|
||||||
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
|
|
||||||
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
|
|
||||||
const CUBE_PROGRAM: &str = include_str!("../../tests/executor/inputs/cube.kcl");
|
|
||||||
const MATH_PROGRAM: &str = include_str!("../../tests/executor/inputs/math.kcl");
|
|
||||||
const MIKE_STRESS_TEST_PROGRAM: &str = include_str!("../../tests/executor/inputs/mike_stress_test.kcl");
|
|
||||||
const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");
|
|
@ -1,45 +0,0 @@
|
|||||||
use iai::black_box;
|
|
||||||
use kcl_lib::lsp::test_util::kcl_lsp_server;
|
|
||||||
use tower_lsp::LanguageServer;
|
|
||||||
|
|
||||||
async fn kcl_lsp_semantic_tokens(code: &str) {
|
|
||||||
let server = kcl_lsp_server(false).await.unwrap();
|
|
||||||
|
|
||||||
// Send open file.
|
|
||||||
server
|
|
||||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
|
||||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
|
||||||
uri: "file:///test.kcl".try_into().unwrap(),
|
|
||||||
language_id: "kcl".to_string(),
|
|
||||||
version: 1,
|
|
||||||
text: code.to_string(),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.await;
|
|
||||||
|
|
||||||
// Send semantic tokens request.
|
|
||||||
black_box(
|
|
||||||
server
|
|
||||||
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
|
|
||||||
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
|
|
||||||
uri: "file:///test.kcl".try_into().unwrap(),
|
|
||||||
},
|
|
||||||
partial_result_params: Default::default(),
|
|
||||||
work_done_progress_params: Default::default(),
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn semantic_tokens_global_tags() {
|
|
||||||
let code = GLOBAL_TAGS_FILE;
|
|
||||||
kcl_lsp_semantic_tokens(code).await;
|
|
||||||
}
|
|
||||||
|
|
||||||
iai::main! {
|
|
||||||
semantic_tokens_global_tags,
|
|
||||||
}
|
|
||||||
|
|
||||||
const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");
|
|
@ -159,7 +159,7 @@ impl Program {
|
|||||||
RuleT: crate::lint::rule::Rule<'a>,
|
RuleT: crate::lint::rule::Rule<'a>,
|
||||||
{
|
{
|
||||||
let v = Arc::new(Mutex::new(vec![]));
|
let v = Arc::new(Mutex::new(vec![]));
|
||||||
crate::walk::walk(self, &|node: crate::walk::Node<'a>| {
|
crate::lint::walk(self, &|node: crate::lint::Node<'a>| {
|
||||||
let mut findings = v.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut findings = v.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
findings.append(&mut rule.check(node)?);
|
findings.append(&mut rule.check(node)?);
|
||||||
Ok(true)
|
Ok(true)
|
||||||
@ -171,13 +171,13 @@ impl Program {
|
|||||||
/// Walk the ast and get all the variables and tags as completion items.
|
/// Walk the ast and get all the variables and tags as completion items.
|
||||||
pub fn completion_items<'a>(&'a self) -> Result<Vec<CompletionItem>> {
|
pub fn completion_items<'a>(&'a self) -> Result<Vec<CompletionItem>> {
|
||||||
let completions = Arc::new(Mutex::new(vec![]));
|
let completions = Arc::new(Mutex::new(vec![]));
|
||||||
crate::walk::walk(self, &|node: crate::walk::Node<'a>| {
|
crate::lint::walk(self, &|node: crate::lint::Node<'a>| {
|
||||||
let mut findings = completions.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut findings = completions.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
match node {
|
match node {
|
||||||
crate::walk::Node::TagDeclarator(tag) => {
|
crate::lint::Node::TagDeclarator(tag) => {
|
||||||
findings.push(tag.into());
|
findings.push(tag.into());
|
||||||
}
|
}
|
||||||
crate::walk::Node::VariableDeclaration(variable) => {
|
crate::lint::Node::VariableDeclaration(variable) => {
|
||||||
findings.extend::<Vec<CompletionItem>>(variable.into());
|
findings.extend::<Vec<CompletionItem>>(variable.into());
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
@ -255,13 +255,13 @@ impl Program {
|
|||||||
/// Returns all the lsp symbols in the program.
|
/// Returns all the lsp symbols in the program.
|
||||||
pub fn get_lsp_symbols<'a>(&'a self, code: &str) -> Result<Vec<DocumentSymbol>> {
|
pub fn get_lsp_symbols<'a>(&'a self, code: &str) -> Result<Vec<DocumentSymbol>> {
|
||||||
let symbols = Arc::new(Mutex::new(vec![]));
|
let symbols = Arc::new(Mutex::new(vec![]));
|
||||||
crate::walk::walk(self, &|node: crate::walk::Node<'a>| {
|
crate::lint::walk(self, &|node: crate::lint::Node<'a>| {
|
||||||
let mut findings = symbols.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut findings = symbols.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
match node {
|
match node {
|
||||||
crate::walk::Node::TagDeclarator(tag) => {
|
crate::lint::Node::TagDeclarator(tag) => {
|
||||||
findings.extend::<Vec<DocumentSymbol>>(tag.get_lsp_symbols(code));
|
findings.extend::<Vec<DocumentSymbol>>(tag.get_lsp_symbols(code));
|
||||||
}
|
}
|
||||||
crate::walk::Node::VariableDeclaration(variable) => {
|
crate::lint::Node::VariableDeclaration(variable) => {
|
||||||
findings.extend::<Vec<DocumentSymbol>>(variable.get_lsp_symbols(code));
|
findings.extend::<Vec<DocumentSymbol>>(variable.get_lsp_symbols(code));
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
@ -1217,7 +1217,7 @@ impl CallExpression {
|
|||||||
|
|
||||||
// Call the stdlib function
|
// Call the stdlib function
|
||||||
let p = func.function().clone().body;
|
let p = func.function().clone().body;
|
||||||
let results = match ctx.inner_execute(&p, &mut fn_memory, BodyType::Block).await {
|
let results = match ctx.inner_execute(p, &mut fn_memory, BodyType::Block).await {
|
||||||
Ok(results) => results,
|
Ok(results) => results,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
// We need to override the source ranges so we don't get the embedded kcl
|
// We need to override the source ranges so we don't get the embedded kcl
|
||||||
|
@ -828,7 +828,7 @@ mod tests {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
some_function,
|
some_function,
|
||||||
crate::ast::types::Function::StdLib {
|
crate::ast::types::Function::StdLib {
|
||||||
func: Box::new(crate::std::sketch::Line)
|
func: Box::new(crate::std::sketch::Line),
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -110,8 +110,6 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Send the modeling cmd and wait for the response.
|
/// Send the modeling cmd and wait for the response.
|
||||||
// TODO: This should only borrow `cmd`.
|
|
||||||
// See https://github.com/KittyCAD/modeling-app/issues/2821
|
|
||||||
async fn send_modeling_cmd(
|
async fn send_modeling_cmd(
|
||||||
&self,
|
&self,
|
||||||
id: uuid::Uuid,
|
id: uuid::Uuid,
|
||||||
|
@ -142,7 +142,7 @@ impl IntoDiagnostic for KclError {
|
|||||||
|
|
||||||
Diagnostic {
|
Diagnostic {
|
||||||
range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
|
range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
|
||||||
severity: Some(self.severity()),
|
severity: Some(DiagnosticSeverity::ERROR),
|
||||||
code: None,
|
code: None,
|
||||||
// TODO: this is neat we can pass a URL to a help page here for this specific error.
|
// TODO: this is neat we can pass a URL to a help page here for this specific error.
|
||||||
code_description: None,
|
code_description: None,
|
||||||
@ -153,10 +153,6 @@ impl IntoDiagnostic for KclError {
|
|||||||
data: None,
|
data: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn severity(&self) -> DiagnosticSeverity {
|
|
||||||
DiagnosticSeverity::ERROR
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// This is different than to_string() in that it will serialize the Error
|
/// This is different than to_string() in that it will serialize the Error
|
||||||
|
@ -16,7 +16,7 @@ use crate::{
|
|||||||
errors::{KclError, KclErrorDetails},
|
errors::{KclError, KclErrorDetails},
|
||||||
fs::FileManager,
|
fs::FileManager,
|
||||||
settings::types::UnitLength,
|
settings::types::UnitLength,
|
||||||
std::{FnAsArg, FunctionKind, StdLib},
|
std::{FunctionKind, StdLib},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
||||||
@ -640,52 +640,6 @@ impl MemoryItem {
|
|||||||
.map(Some)
|
.map(Some)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn as_user_val(&self) -> Option<&UserVal> {
|
|
||||||
if let MemoryItem::UserVal(x) = self {
|
|
||||||
Some(x)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// If this value is of type u32, return it.
|
|
||||||
pub fn get_u32(&self, source_ranges: Vec<SourceRange>) -> Result<u32, KclError> {
|
|
||||||
let err = KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "Expected an integer >= 0".to_owned(),
|
|
||||||
source_ranges,
|
|
||||||
});
|
|
||||||
self.as_user_val()
|
|
||||||
.and_then(|uv| uv.value.as_number())
|
|
||||||
.and_then(|n| n.as_u64())
|
|
||||||
.and_then(|n| u32::try_from(n).ok())
|
|
||||||
.ok_or(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// If this value is of type function, return it.
|
|
||||||
pub fn get_function(&self, source_ranges: Vec<SourceRange>) -> Result<FnAsArg<'_>, KclError> {
|
|
||||||
let MemoryItem::Function {
|
|
||||||
func,
|
|
||||||
expression,
|
|
||||||
meta: _,
|
|
||||||
} = &self
|
|
||||||
else {
|
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "not an in-memory function".to_string(),
|
|
||||||
source_ranges,
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
let func = func.as_ref().ok_or_else(|| {
|
|
||||||
KclError::Semantic(KclErrorDetails {
|
|
||||||
message: format!("Not an in-memory function: {:?}", expression),
|
|
||||||
source_ranges,
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
Ok(FnAsArg {
|
|
||||||
func,
|
|
||||||
expr: expression.to_owned(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Backwards compatibility for getting a tag from a memory item.
|
/// Backwards compatibility for getting a tag from a memory item.
|
||||||
pub fn get_tag_identifier(&self) -> Result<TagIdentifier, KclError> {
|
pub fn get_tag_identifier(&self) -> Result<TagIdentifier, KclError> {
|
||||||
match self {
|
match self {
|
||||||
@ -1501,7 +1455,7 @@ impl ExecutorContext {
|
|||||||
/// Kurt uses this for partial execution.
|
/// Kurt uses this for partial execution.
|
||||||
pub async fn run(
|
pub async fn run(
|
||||||
&self,
|
&self,
|
||||||
program: &crate::ast::types::Program,
|
program: crate::ast::types::Program,
|
||||||
memory: Option<ProgramMemory>,
|
memory: Option<ProgramMemory>,
|
||||||
) -> Result<ProgramMemory, KclError> {
|
) -> Result<ProgramMemory, KclError> {
|
||||||
// Before we even start executing the program, set the units.
|
// Before we even start executing the program, set the units.
|
||||||
@ -1527,7 +1481,7 @@ impl ExecutorContext {
|
|||||||
#[async_recursion]
|
#[async_recursion]
|
||||||
pub(crate) async fn inner_execute(
|
pub(crate) async fn inner_execute(
|
||||||
&self,
|
&self,
|
||||||
program: &crate::ast::types::Program,
|
program: crate::ast::types::Program,
|
||||||
memory: &mut ProgramMemory,
|
memory: &mut ProgramMemory,
|
||||||
body_type: BodyType,
|
body_type: BodyType,
|
||||||
) -> Result<ProgramMemory, KclError> {
|
) -> Result<ProgramMemory, KclError> {
|
||||||
@ -1559,7 +1513,9 @@ impl ExecutorContext {
|
|||||||
}
|
}
|
||||||
FunctionKind::Std(func) => {
|
FunctionKind::Std(func) => {
|
||||||
let mut newmem = memory.clone();
|
let mut newmem = memory.clone();
|
||||||
let result = self.inner_execute(func.program(), &mut newmem, BodyType::Block).await?;
|
let result = self
|
||||||
|
.inner_execute(func.program().to_owned(), &mut newmem, BodyType::Block)
|
||||||
|
.await?;
|
||||||
memory.return_ = result.return_;
|
memory.return_ = result.return_;
|
||||||
}
|
}
|
||||||
FunctionKind::UserDefined => {
|
FunctionKind::UserDefined => {
|
||||||
@ -1695,7 +1651,7 @@ impl ExecutorContext {
|
|||||||
let mut fn_memory = assign_args_to_params(&function_expression, args, memory.clone())?;
|
let mut fn_memory = assign_args_to_params(&function_expression, args, memory.clone())?;
|
||||||
|
|
||||||
let result = ctx
|
let result = ctx
|
||||||
.inner_execute(&function_expression.body, &mut fn_memory, BodyType::Block)
|
.inner_execute(function_expression.body.clone(), &mut fn_memory, BodyType::Block)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok((result.return_, fn_memory.get_tags()))
|
Ok((result.return_, fn_memory.get_tags()))
|
||||||
@ -1745,7 +1701,7 @@ impl ExecutorContext {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Execute the program, then get a PNG screenshot.
|
/// Execute the program, then get a PNG screenshot.
|
||||||
pub async fn execute_and_prepare_snapshot(&self, program: &Program) -> Result<kittycad::types::TakeSnapshot> {
|
pub async fn execute_and_prepare_snapshot(&self, program: Program) -> Result<kittycad::types::TakeSnapshot> {
|
||||||
let _ = self.run(program, None).await?;
|
let _ = self.run(program, None).await?;
|
||||||
|
|
||||||
// Zoom to fit.
|
// Zoom to fit.
|
||||||
@ -1862,7 +1818,7 @@ mod tests {
|
|||||||
settings: Default::default(),
|
settings: Default::default(),
|
||||||
is_mock: true,
|
is_mock: true,
|
||||||
};
|
};
|
||||||
let memory = ctx.run(&program, None).await?;
|
let memory = ctx.run(program, None).await?;
|
||||||
|
|
||||||
Ok(memory)
|
Ok(memory)
|
||||||
}
|
}
|
||||||
|
@ -1,45 +0,0 @@
|
|||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
use schemars::JsonSchema;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
ast::types::FunctionExpression,
|
|
||||||
errors::KclError,
|
|
||||||
executor::{ExecutorContext, MemoryFunction, MemoryItem, Metadata, ProgramMemory, ProgramReturn},
|
|
||||||
};
|
|
||||||
|
|
||||||
/// A function being used as a parameter into a stdlib function.
|
|
||||||
pub struct FunctionParam<'a> {
|
|
||||||
pub inner: &'a MemoryFunction,
|
|
||||||
pub memory: ProgramMemory,
|
|
||||||
pub fn_expr: Box<FunctionExpression>,
|
|
||||||
pub meta: Vec<Metadata>,
|
|
||||||
pub ctx: ExecutorContext,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> FunctionParam<'a> {
|
|
||||||
pub async fn call(
|
|
||||||
&self,
|
|
||||||
args: Vec<MemoryItem>,
|
|
||||||
) -> Result<(Option<ProgramReturn>, HashMap<String, MemoryItem>), KclError> {
|
|
||||||
(self.inner)(
|
|
||||||
args,
|
|
||||||
self.memory.clone(),
|
|
||||||
self.fn_expr.clone(),
|
|
||||||
self.meta.clone(),
|
|
||||||
self.ctx.clone(),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> JsonSchema for FunctionParam<'a> {
|
|
||||||
fn schema_name() -> String {
|
|
||||||
"FunctionParam".to_owned()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
|
|
||||||
// TODO: Actually generate a reasonable schema.
|
|
||||||
gen.subschema_for::<()>()
|
|
||||||
}
|
|
||||||
}
|
|
@ -20,7 +20,6 @@ pub mod engine;
|
|||||||
pub mod errors;
|
pub mod errors;
|
||||||
pub mod executor;
|
pub mod executor;
|
||||||
pub mod fs;
|
pub mod fs;
|
||||||
mod function_param;
|
|
||||||
pub mod lint;
|
pub mod lint;
|
||||||
pub mod lsp;
|
pub mod lsp;
|
||||||
pub mod parser;
|
pub mod parser;
|
||||||
@ -29,6 +28,5 @@ pub mod std;
|
|||||||
pub mod test_server;
|
pub mod test_server;
|
||||||
pub mod thread;
|
pub mod thread;
|
||||||
pub mod token;
|
pub mod token;
|
||||||
pub mod walk;
|
|
||||||
#[cfg(target_arch = "wasm32")]
|
#[cfg(target_arch = "wasm32")]
|
||||||
pub mod wasm;
|
pub mod wasm;
|
||||||
|
236
src/wasm-lib/kcl/src/lint/ast_walk.rs
Normal file
236
src/wasm-lib/kcl/src/lint/ast_walk.rs
Normal file
@ -0,0 +1,236 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
ast::types::{
|
||||||
|
BinaryPart, BodyItem, LiteralIdentifier, MemberExpression, MemberObject, ObjectExpression, ObjectProperty,
|
||||||
|
Parameter, Program, UnaryExpression, Value, VariableDeclarator,
|
||||||
|
},
|
||||||
|
lint::Node,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Walker is implemented by things that are able to walk an AST tree to
|
||||||
|
/// produce lints. This trait is implemented automatically for a few of the
|
||||||
|
/// common types, but can be manually implemented too.
|
||||||
|
pub trait Walker<'a> {
|
||||||
|
/// Walk will visit every element of the AST.
|
||||||
|
fn walk(&self, n: Node<'a>) -> Result<bool>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, FnT> Walker<'a> for FnT
|
||||||
|
where
|
||||||
|
FnT: Fn(Node<'a>) -> Result<bool>,
|
||||||
|
{
|
||||||
|
fn walk(&self, n: Node<'a>) -> Result<bool> {
|
||||||
|
self(n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Run the Walker against all [Node]s in a [Program].
|
||||||
|
pub fn walk<'a, WalkT>(prog: &'a Program, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(prog.into())?;
|
||||||
|
|
||||||
|
for bi in &prog.body {
|
||||||
|
walk_body_item(bi, f)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_variable_declarator<'a, WalkT>(node: &'a VariableDeclarator, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
f.walk((&node.id).into())?;
|
||||||
|
walk_value(&node.init, f)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_parameter<'a, WalkT>(node: &'a Parameter, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
f.walk((&node.identifier).into())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_member_object<'a, WalkT>(node: &'a MemberObject, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_literal_identifier<'a, WalkT>(node: &'a LiteralIdentifier, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_member_expression<'a, WalkT>(node: &'a MemberExpression, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
|
||||||
|
walk_member_object(&node.object, f)?;
|
||||||
|
walk_literal_identifier(&node.property, f)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_binary_part<'a, WalkT>(node: &'a BinaryPart, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
match node {
|
||||||
|
BinaryPart::Literal(lit) => f.walk(lit.as_ref().into())?,
|
||||||
|
BinaryPart::Identifier(id) => f.walk(id.as_ref().into())?,
|
||||||
|
BinaryPart::BinaryExpression(be) => f.walk(be.as_ref().into())?,
|
||||||
|
BinaryPart::CallExpression(ce) => f.walk(ce.as_ref().into())?,
|
||||||
|
BinaryPart::UnaryExpression(ue) => {
|
||||||
|
walk_unary_expression(ue, f)?;
|
||||||
|
true
|
||||||
|
}
|
||||||
|
BinaryPart::MemberExpression(me) => {
|
||||||
|
walk_member_expression(me, f)?;
|
||||||
|
true
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn walk_value<'a, WalkT>(node: &'a Value, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
match node {
|
||||||
|
Value::Literal(lit) => {
|
||||||
|
f.walk(lit.as_ref().into())?;
|
||||||
|
}
|
||||||
|
Value::TagDeclarator(tag) => {
|
||||||
|
f.walk(tag.as_ref().into())?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Value::Identifier(id) => {
|
||||||
|
// sometimes there's a bare Identifier without a Value::Identifier.
|
||||||
|
f.walk(id.as_ref().into())?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Value::BinaryExpression(be) => {
|
||||||
|
f.walk(be.as_ref().into())?;
|
||||||
|
|
||||||
|
walk_binary_part(&be.left, f)?;
|
||||||
|
walk_binary_part(&be.right, f)?;
|
||||||
|
}
|
||||||
|
Value::FunctionExpression(fe) => {
|
||||||
|
f.walk(fe.as_ref().into())?;
|
||||||
|
|
||||||
|
for arg in &fe.params {
|
||||||
|
walk_parameter(arg, f)?;
|
||||||
|
}
|
||||||
|
walk(&fe.body, f)?;
|
||||||
|
}
|
||||||
|
Value::CallExpression(ce) => {
|
||||||
|
f.walk(ce.as_ref().into())?;
|
||||||
|
f.walk((&ce.callee).into())?;
|
||||||
|
for e in &ce.arguments {
|
||||||
|
walk_value::<WalkT>(e, f)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Value::PipeExpression(pe) => {
|
||||||
|
f.walk(pe.as_ref().into())?;
|
||||||
|
|
||||||
|
for e in &pe.body {
|
||||||
|
walk_value::<WalkT>(e, f)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Value::PipeSubstitution(ps) => {
|
||||||
|
f.walk(ps.as_ref().into())?;
|
||||||
|
}
|
||||||
|
Value::ArrayExpression(ae) => {
|
||||||
|
f.walk(ae.as_ref().into())?;
|
||||||
|
for e in &ae.elements {
|
||||||
|
walk_value::<WalkT>(e, f)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Value::ObjectExpression(oe) => {
|
||||||
|
walk_object_expression(oe, f)?;
|
||||||
|
}
|
||||||
|
Value::MemberExpression(me) => {
|
||||||
|
walk_member_expression(me, f)?;
|
||||||
|
}
|
||||||
|
Value::UnaryExpression(ue) => {
|
||||||
|
walk_unary_expression(ue, f)?;
|
||||||
|
}
|
||||||
|
Value::None(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Walk through an [ObjectProperty].
|
||||||
|
fn walk_object_property<'a, WalkT>(node: &'a ObjectProperty, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
walk_value(&node.value, f)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Walk through an [ObjectExpression].
|
||||||
|
fn walk_object_expression<'a, WalkT>(node: &'a ObjectExpression, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
for prop in &node.properties {
|
||||||
|
walk_object_property(prop, f)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// walk through an [UnaryExpression].
|
||||||
|
fn walk_unary_expression<'a, WalkT>(node: &'a UnaryExpression, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
f.walk(node.into())?;
|
||||||
|
walk_binary_part(&node.argument, f)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// walk through a [BodyItem].
|
||||||
|
fn walk_body_item<'a, WalkT>(node: &'a BodyItem, f: &WalkT) -> Result<()>
|
||||||
|
where
|
||||||
|
WalkT: Walker<'a>,
|
||||||
|
{
|
||||||
|
// We don't walk a BodyItem since it's an enum itself.
|
||||||
|
|
||||||
|
match node {
|
||||||
|
BodyItem::ExpressionStatement(xs) => {
|
||||||
|
f.walk(xs.into())?;
|
||||||
|
walk_value(&xs.expression, f)?;
|
||||||
|
}
|
||||||
|
BodyItem::VariableDeclaration(vd) => {
|
||||||
|
f.walk(vd.into())?;
|
||||||
|
for dec in &vd.declarations {
|
||||||
|
walk_variable_declarator(dec, f)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
BodyItem::ReturnStatement(rs) => {
|
||||||
|
f.walk(rs.into())?;
|
||||||
|
walk_value(&rs.argument, f)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
@ -3,8 +3,10 @@ use anyhow::Result;
|
|||||||
use crate::{
|
use crate::{
|
||||||
ast::types::VariableDeclarator,
|
ast::types::VariableDeclarator,
|
||||||
executor::SourceRange,
|
executor::SourceRange,
|
||||||
lint::rule::{def_finding, Discovered, Finding},
|
lint::{
|
||||||
walk::Node,
|
rule::{def_finding, Discovered, Finding},
|
||||||
|
Node,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
def_finding!(
|
def_finding!(
|
||||||
@ -65,11 +67,7 @@ mod tests {
|
|||||||
assert_finding!(lint_variables, Z0001, "const thicc_nes = 0.5");
|
assert_finding!(lint_variables, Z0001, "const thicc_nes = 0.5");
|
||||||
}
|
}
|
||||||
|
|
||||||
test_finding!(
|
test_finding!(z0001_full_bad, lint_variables, Z0001, "\
|
||||||
z0001_full_bad,
|
|
||||||
lint_variables,
|
|
||||||
Z0001,
|
|
||||||
"\
|
|
||||||
// Define constants
|
// Define constants
|
||||||
const pipeLength = 40
|
const pipeLength = 40
|
||||||
const pipeSmallDia = 10
|
const pipeSmallDia = 10
|
||||||
@ -98,14 +96,9 @@ const Part001 = startSketchOn('XY')
|
|||||||
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
||||||
|> close(%)
|
|> close(%)
|
||||||
|> revolve({ axis: 'y' }, %)
|
|> revolve({ axis: 'y' }, %)
|
||||||
"
|
");
|
||||||
);
|
|
||||||
|
|
||||||
test_no_finding!(
|
test_no_finding!(z0001_full_good, lint_variables, Z0001, "\
|
||||||
z0001_full_good,
|
|
||||||
lint_variables,
|
|
||||||
Z0001,
|
|
||||||
"\
|
|
||||||
// Define constants
|
// Define constants
|
||||||
const pipeLength = 40
|
const pipeLength = 40
|
||||||
const pipeSmallDia = 10
|
const pipeSmallDia = 10
|
||||||
@ -134,6 +127,5 @@ const part001 = startSketchOn('XY')
|
|||||||
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
||||||
|> close(%)
|
|> close(%)
|
||||||
|> revolve({ axis: 'y' }, %)
|
|> revolve({ axis: 'y' }, %)
|
||||||
"
|
");
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,9 @@
|
|||||||
|
mod ast_node;
|
||||||
|
mod ast_walk;
|
||||||
pub mod checks;
|
pub mod checks;
|
||||||
pub mod rule;
|
pub mod rule;
|
||||||
|
|
||||||
|
pub use ast_node::Node;
|
||||||
|
pub use ast_walk::walk;
|
||||||
|
// pub(crate) use rule::{def_finding, finding};
|
||||||
pub use rule::{Discovered, Finding};
|
pub use rule::{Discovered, Finding};
|
||||||
|
@ -3,7 +3,7 @@ use schemars::JsonSchema;
|
|||||||
use serde::Serialize;
|
use serde::Serialize;
|
||||||
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
|
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
|
||||||
|
|
||||||
use crate::{executor::SourceRange, lsp::IntoDiagnostic, walk::Node};
|
use crate::{executor::SourceRange, lint::Node, lsp::IntoDiagnostic};
|
||||||
|
|
||||||
/// Check the provided AST for any found rule violations.
|
/// Check the provided AST for any found rule violations.
|
||||||
///
|
///
|
||||||
@ -70,10 +70,6 @@ impl IntoDiagnostic for Discovered {
|
|||||||
fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
|
fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
|
||||||
(&self).to_lsp_diagnostic(code)
|
(&self).to_lsp_diagnostic(code)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn severity(&self) -> DiagnosticSeverity {
|
|
||||||
(&self).severity()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl IntoDiagnostic for &Discovered {
|
impl IntoDiagnostic for &Discovered {
|
||||||
@ -83,7 +79,7 @@ impl IntoDiagnostic for &Discovered {
|
|||||||
|
|
||||||
Diagnostic {
|
Diagnostic {
|
||||||
range: source_range.to_lsp_range(code),
|
range: source_range.to_lsp_range(code),
|
||||||
severity: Some(self.severity()),
|
severity: Some(DiagnosticSeverity::INFORMATION),
|
||||||
code: None,
|
code: None,
|
||||||
// TODO: this is neat we can pass a URL to a help page here for this specific error.
|
// TODO: this is neat we can pass a URL to a help page here for this specific error.
|
||||||
code_description: None,
|
code_description: None,
|
||||||
@ -94,10 +90,6 @@ impl IntoDiagnostic for &Discovered {
|
|||||||
data: None,
|
data: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn severity(&self) -> DiagnosticSeverity {
|
|
||||||
DiagnosticSeverity::INFORMATION
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Abstract lint problem type.
|
/// Abstract lint problem type.
|
||||||
|
@ -3,15 +3,59 @@
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use dashmap::DashMap;
|
use tokio::sync::RwLock;
|
||||||
use tower_lsp::lsp_types::{
|
use tower_lsp::lsp_types::{
|
||||||
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||||
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
|
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
|
||||||
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializedParams, MessageType, RenameFilesParams,
|
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializedParams, MessageType,
|
||||||
TextDocumentItem, WorkspaceFolder,
|
RenameFilesParams, TextDocumentItem, WorkspaceFolder,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::fs::FileSystem;
|
use crate::{
|
||||||
|
fs::FileSystem,
|
||||||
|
lsp::safemap::SafeMap,
|
||||||
|
thread::{JoinHandle, Thread},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct InnerHandle(Arc<JoinHandle>);
|
||||||
|
|
||||||
|
impl InnerHandle {
|
||||||
|
pub fn new(handle: JoinHandle) -> Self {
|
||||||
|
Self(Arc::new(handle))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_finished(&self) -> bool {
|
||||||
|
self.0.is_finished()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn cancel(&self) {
|
||||||
|
self.0.abort();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct UpdateHandle(Arc<RwLock<Option<InnerHandle>>>);
|
||||||
|
|
||||||
|
impl UpdateHandle {
|
||||||
|
pub fn new(handle: InnerHandle) -> Self {
|
||||||
|
Self(Arc::new(RwLock::new(Some(handle))))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn read(&self) -> Option<InnerHandle> {
|
||||||
|
self.0.read().await.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn write(&self, handle: Option<InnerHandle>) {
|
||||||
|
*self.0.write().await = handle;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for UpdateHandle {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self(Arc::new(RwLock::new(None)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// A trait for the backend of the language server.
|
/// A trait for the backend of the language server.
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
@ -19,14 +63,18 @@ pub trait Backend: Clone + Send + Sync
|
|||||||
where
|
where
|
||||||
Self: 'static,
|
Self: 'static,
|
||||||
{
|
{
|
||||||
fn client(&self) -> &tower_lsp::Client;
|
fn client(&self) -> tower_lsp::Client;
|
||||||
|
|
||||||
fn fs(&self) -> &Arc<crate::fs::FileManager>;
|
fn fs(&self) -> Arc<crate::fs::FileManager>;
|
||||||
|
|
||||||
async fn is_initialized(&self) -> bool;
|
async fn is_initialized(&self) -> bool;
|
||||||
|
|
||||||
async fn set_is_initialized(&self, is_initialized: bool);
|
async fn set_is_initialized(&self, is_initialized: bool);
|
||||||
|
|
||||||
|
async fn current_handle(&self) -> Option<InnerHandle>;
|
||||||
|
|
||||||
|
async fn set_current_handle(&self, handle: Option<InnerHandle>);
|
||||||
|
|
||||||
async fn workspace_folders(&self) -> Vec<WorkspaceFolder>;
|
async fn workspace_folders(&self) -> Vec<WorkspaceFolder>;
|
||||||
|
|
||||||
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
|
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
|
||||||
@ -34,7 +82,7 @@ where
|
|||||||
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
|
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
|
||||||
|
|
||||||
/// Get the current code map.
|
/// Get the current code map.
|
||||||
fn code_map(&self) -> &DashMap<String, Vec<u8>>;
|
fn code_map(&self) -> SafeMap<String, Vec<u8>>;
|
||||||
|
|
||||||
/// Insert a new code map.
|
/// Insert a new code map.
|
||||||
async fn insert_code_map(&self, uri: String, text: Vec<u8>);
|
async fn insert_code_map(&self, uri: String, text: Vec<u8>);
|
||||||
@ -46,36 +94,62 @@ where
|
|||||||
async fn clear_code_state(&self);
|
async fn clear_code_state(&self);
|
||||||
|
|
||||||
/// Get the current diagnostics map.
|
/// Get the current diagnostics map.
|
||||||
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>>;
|
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport>;
|
||||||
|
|
||||||
/// On change event.
|
/// On change event.
|
||||||
async fn inner_on_change(&self, params: TextDocumentItem, force: bool);
|
async fn inner_on_change(&self, params: TextDocumentItem, force: bool);
|
||||||
|
|
||||||
/// Check if the file has diagnostics.
|
/// Check if the file has diagnostics.
|
||||||
async fn has_diagnostics(&self, uri: &str) -> bool {
|
async fn has_diagnostics(&self, uri: &str) -> bool {
|
||||||
let Some(diagnostics) = self.current_diagnostics_map().get(uri) else {
|
if let Some(tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics)) =
|
||||||
return false;
|
self.current_diagnostics_map().get(uri).await
|
||||||
};
|
{
|
||||||
|
!diagnostics.full_document_diagnostic_report.items.is_empty()
|
||||||
!diagnostics.is_empty()
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn on_change(&self, params: TextDocumentItem) {
|
async fn on_change(&self, params: TextDocumentItem) {
|
||||||
// Check if the document is in the current code map and if it is the same as what we have
|
// Check if the document is in the current code map and if it is the same as what we have
|
||||||
// stored.
|
// stored.
|
||||||
let filename = params.uri.to_string();
|
let filename = params.uri.to_string();
|
||||||
if let Some(current_code) = self.code_map().get(&filename) {
|
if let Some(current_code) = self.code_map().get(&filename).await {
|
||||||
if *current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
|
if current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
println!("on_change after check: {:?}", params);
|
// Check if we already have a handle running.
|
||||||
|
if let Some(current_handle) = self.current_handle().await {
|
||||||
|
self.set_current_handle(None).await;
|
||||||
|
// Drop that handle to cancel it.
|
||||||
|
current_handle.cancel();
|
||||||
|
}
|
||||||
|
|
||||||
self.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
|
let cloned = self.clone();
|
||||||
.await;
|
let task = JoinHandle::new(async move {
|
||||||
println!("on_change after insert: {:?}", params);
|
cloned
|
||||||
self.inner_on_change(params, false).await;
|
.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
|
||||||
|
.await;
|
||||||
|
cloned.inner_on_change(params, false).await;
|
||||||
|
cloned.set_current_handle(None).await;
|
||||||
|
});
|
||||||
|
let update_handle = InnerHandle::new(task);
|
||||||
|
|
||||||
|
// Set our new handle.
|
||||||
|
self.set_current_handle(Some(update_handle.clone())).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn wait_on_handle(&self) {
|
||||||
|
while let Some(handle) = self.current_handle().await {
|
||||||
|
if !handle.is_finished() {
|
||||||
|
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.set_current_handle(None).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn update_from_disk<P: AsRef<std::path::Path> + std::marker::Send>(&self, path: P) -> Result<()> {
|
async fn update_from_disk<P: AsRef<std::path::Path> + std::marker::Send>(&self, path: P) -> Result<()> {
|
||||||
@ -137,7 +211,7 @@ where
|
|||||||
self.remove_workspace_folders(params.event.removed).await;
|
self.remove_workspace_folders(params.event.removed).await;
|
||||||
// Remove the code from the current code map.
|
// Remove the code from the current code map.
|
||||||
// We do this since it means the user is changing projects so let's refresh the state.
|
// We do this since it means the user is changing projects so let's refresh the state.
|
||||||
if !self.code_map().is_empty() && should_clear {
|
if !self.code_map().is_empty().await && should_clear {
|
||||||
self.clear_code_state().await;
|
self.clear_code_state().await;
|
||||||
}
|
}
|
||||||
for added in params.event.added {
|
for added in params.event.added {
|
||||||
|
@ -9,27 +9,28 @@ use std::{
|
|||||||
sync::{Arc, RwLock},
|
sync::{Arc, RwLock},
|
||||||
};
|
};
|
||||||
|
|
||||||
use dashmap::DashMap;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tower_lsp::{
|
use tower_lsp::{
|
||||||
jsonrpc::{Error, Result},
|
jsonrpc::{Error, Result},
|
||||||
lsp_types::{
|
lsp_types::{
|
||||||
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||||
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
|
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
|
||||||
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializeResult, InitializedParams,
|
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializeParams,
|
||||||
MessageType, OneOf, RenameFilesParams, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability,
|
InitializeResult, InitializedParams, MessageType, OneOf, RenameFilesParams, ServerCapabilities,
|
||||||
TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
|
TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder,
|
||||||
WorkspaceServerCapabilities,
|
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
|
||||||
},
|
},
|
||||||
LanguageServer,
|
LanguageServer,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use super::backend::{InnerHandle, UpdateHandle};
|
||||||
use crate::lsp::{
|
use crate::lsp::{
|
||||||
backend::Backend as _,
|
backend::Backend as _,
|
||||||
copilot::types::{
|
copilot::types::{
|
||||||
CopilotAcceptCompletionParams, CopilotCompletionResponse, CopilotCompletionTelemetry, CopilotEditorInfo,
|
CopilotAcceptCompletionParams, CopilotCompletionResponse, CopilotCompletionTelemetry, CopilotEditorInfo,
|
||||||
CopilotLspCompletionParams, CopilotRejectCompletionParams, DocParams,
|
CopilotLspCompletionParams, CopilotRejectCompletionParams, DocParams,
|
||||||
},
|
},
|
||||||
|
safemap::SafeMap,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Deserialize, Serialize, Debug)]
|
#[derive(Deserialize, Serialize, Debug)]
|
||||||
@ -49,9 +50,9 @@ pub struct Backend {
|
|||||||
/// The file system client to use.
|
/// The file system client to use.
|
||||||
pub fs: Arc<crate::fs::FileManager>,
|
pub fs: Arc<crate::fs::FileManager>,
|
||||||
/// The workspace folders.
|
/// The workspace folders.
|
||||||
pub workspace_folders: DashMap<String, WorkspaceFolder>,
|
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
|
||||||
/// Current code.
|
/// Current code.
|
||||||
pub code_map: DashMap<String, Vec<u8>>,
|
pub code_map: SafeMap<String, Vec<u8>>,
|
||||||
/// The Zoo API client.
|
/// The Zoo API client.
|
||||||
pub zoo_client: kittycad::Client,
|
pub zoo_client: kittycad::Client,
|
||||||
/// The editor info is used to store information about the editor.
|
/// The editor info is used to store information about the editor.
|
||||||
@ -59,22 +60,21 @@ pub struct Backend {
|
|||||||
/// The cache is used to store the results of previous requests.
|
/// The cache is used to store the results of previous requests.
|
||||||
pub cache: Arc<cache::CopilotCache>,
|
pub cache: Arc<cache::CopilotCache>,
|
||||||
/// Storage so we can send telemetry data back out.
|
/// Storage so we can send telemetry data back out.
|
||||||
pub telemetry: DashMap<uuid::Uuid, CopilotCompletionTelemetry>,
|
pub telemetry: SafeMap<uuid::Uuid, CopilotCompletionTelemetry>,
|
||||||
/// Diagnostics.
|
|
||||||
pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
|
|
||||||
|
|
||||||
pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
|
pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
|
||||||
|
pub current_handle: UpdateHandle,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Implement the shared backend trait for the language server.
|
// Implement the shared backend trait for the language server.
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
impl crate::lsp::backend::Backend for Backend {
|
impl crate::lsp::backend::Backend for Backend {
|
||||||
fn client(&self) -> &tower_lsp::Client {
|
fn client(&self) -> tower_lsp::Client {
|
||||||
&self.client
|
self.client.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fs(&self) -> &Arc<crate::fs::FileManager> {
|
fn fs(&self) -> Arc<crate::fs::FileManager> {
|
||||||
&self.fs
|
self.fs.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn is_initialized(&self) -> bool {
|
async fn is_initialized(&self) -> bool {
|
||||||
@ -85,41 +85,48 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
*self.is_initialized.write().await = is_initialized;
|
*self.is_initialized.write().await = is_initialized;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn current_handle(&self) -> Option<InnerHandle> {
|
||||||
|
self.current_handle.read().await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
|
||||||
|
self.current_handle.write(handle).await;
|
||||||
|
}
|
||||||
|
|
||||||
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
|
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
|
||||||
// TODO: fix clone
|
self.workspace_folders.inner().await.values().cloned().collect()
|
||||||
self.workspace_folders.iter().map(|i| i.clone()).collect()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||||
for folder in folders {
|
for folder in folders {
|
||||||
self.workspace_folders.insert(folder.name.to_string(), folder);
|
self.workspace_folders.insert(folder.name.to_string(), folder).await;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||||
for folder in folders {
|
for folder in folders {
|
||||||
self.workspace_folders.remove(&folder.name);
|
self.workspace_folders.remove(&folder.name).await;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn code_map(&self) -> &DashMap<String, Vec<u8>> {
|
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
|
||||||
&self.code_map
|
self.code_map.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
|
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
|
||||||
self.code_map.insert(uri, text);
|
self.code_map.insert(uri, text).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
|
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
|
||||||
self.code_map.remove(&uri).map(|(_, v)| v)
|
self.code_map.remove(&uri).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn clear_code_state(&self) {
|
async fn clear_code_state(&self) {
|
||||||
self.code_map.clear();
|
self.code_map.clear().await;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
|
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
|
||||||
&self.diagnostics_map
|
Default::default()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn inner_on_change(&self, _params: TextDocumentItem, _force: bool) {
|
async fn inner_on_change(&self, _params: TextDocumentItem, _force: bool) {
|
||||||
@ -131,15 +138,8 @@ impl Backend {
|
|||||||
/// Get completions from the kittycad api.
|
/// Get completions from the kittycad api.
|
||||||
pub async fn get_completions(&self, language: String, prompt: String, suffix: String) -> Result<Vec<String>> {
|
pub async fn get_completions(&self, language: String, prompt: String, suffix: String) -> Result<Vec<String>> {
|
||||||
let body = kittycad::types::KclCodeCompletionRequest {
|
let body = kittycad::types::KclCodeCompletionRequest {
|
||||||
extra: Some(kittycad::types::KclCodeCompletionParams {
|
prompt: Some(prompt.clone()),
|
||||||
language: Some(language.to_string()),
|
suffix: Some(suffix.clone()),
|
||||||
next_indent: None,
|
|
||||||
trim_by_indentation: true,
|
|
||||||
prompt_tokens: Some(prompt.len() as u32),
|
|
||||||
suffix_tokens: Some(suffix.len() as u32),
|
|
||||||
}),
|
|
||||||
prompt: Some(prompt),
|
|
||||||
suffix: Some(suffix),
|
|
||||||
max_tokens: Some(500),
|
max_tokens: Some(500),
|
||||||
temperature: Some(1.0),
|
temperature: Some(1.0),
|
||||||
top_p: Some(1.0),
|
top_p: Some(1.0),
|
||||||
@ -149,6 +149,13 @@ impl Backend {
|
|||||||
nwo: None,
|
nwo: None,
|
||||||
// We haven't implemented streaming yet.
|
// We haven't implemented streaming yet.
|
||||||
stream: false,
|
stream: false,
|
||||||
|
extra: Some(kittycad::types::KclCodeCompletionParams {
|
||||||
|
language: Some(language.to_string()),
|
||||||
|
next_indent: None,
|
||||||
|
trim_by_indentation: true,
|
||||||
|
prompt_tokens: Some(prompt.len() as u32),
|
||||||
|
suffix_tokens: Some(suffix.len() as u32),
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
let resp = self
|
let resp = self
|
||||||
@ -227,7 +234,7 @@ impl Backend {
|
|||||||
completion: completion.clone(),
|
completion: completion.clone(),
|
||||||
params: params.clone(),
|
params: params.clone(),
|
||||||
};
|
};
|
||||||
self.telemetry.insert(completion.uuid, telemetry);
|
self.telemetry.insert(completion.uuid, telemetry).await;
|
||||||
}
|
}
|
||||||
self.cache
|
self.cache
|
||||||
.set_cached_result(&doc_params.uri, &doc_params.pos.line, &response);
|
.set_cached_result(&doc_params.uri, &doc_params.pos.line, &response);
|
||||||
@ -241,7 +248,7 @@ impl Backend {
|
|||||||
.await;
|
.await;
|
||||||
|
|
||||||
// Get the original telemetry data.
|
// Get the original telemetry data.
|
||||||
let Some(original) = self.telemetry.remove(¶ms.uuid) else {
|
let Some(original) = self.telemetry.remove(¶ms.uuid).await else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -260,7 +267,7 @@ impl Backend {
|
|||||||
// Get the original telemetry data.
|
// Get the original telemetry data.
|
||||||
let mut originals: Vec<CopilotCompletionTelemetry> = Default::default();
|
let mut originals: Vec<CopilotCompletionTelemetry> = Default::default();
|
||||||
for uuid in params.uuids {
|
for uuid in params.uuids {
|
||||||
if let Some(original) = self.telemetry.remove(&uuid).map(|(_, v)| v) {
|
if let Some(original) = self.telemetry.remove(&uuid).await {
|
||||||
originals.push(original);
|
originals.push(original);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -333,7 +340,7 @@ impl LanguageServer for Backend {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
||||||
self.do_did_change(params).await;
|
self.do_did_change(params.clone()).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
||||||
|
@ -14,13 +14,12 @@ pub mod custom_notifications;
|
|||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
#[cfg(feature = "cli")]
|
#[cfg(feature = "cli")]
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use dashmap::DashMap;
|
|
||||||
use sha2::Digest;
|
use sha2::Digest;
|
||||||
use tower_lsp::{
|
use tower_lsp::{
|
||||||
jsonrpc::Result as RpcResult,
|
jsonrpc::Result as RpcResult,
|
||||||
lsp_types::{
|
lsp_types::{
|
||||||
CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, CreateFilesParams,
|
CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, CreateFilesParams,
|
||||||
DeleteFilesParams, Diagnostic, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
|
DeleteFilesParams, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
|
||||||
DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
|
DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
|
||||||
DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
|
DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
|
||||||
DidSaveTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
|
DidSaveTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
|
||||||
@ -44,7 +43,11 @@ use crate::lint::checks;
|
|||||||
use crate::{
|
use crate::{
|
||||||
ast::types::{Value, VariableKind},
|
ast::types::{Value, VariableKind},
|
||||||
executor::SourceRange,
|
executor::SourceRange,
|
||||||
lsp::{backend::Backend as _, util::IntoDiagnostic},
|
lsp::{
|
||||||
|
backend::{Backend as _, InnerHandle, UpdateHandle},
|
||||||
|
safemap::SafeMap,
|
||||||
|
util::IntoDiagnostic,
|
||||||
|
},
|
||||||
parser::PIPE_OPERATOR,
|
parser::PIPE_OPERATOR,
|
||||||
token::TokenType,
|
token::TokenType,
|
||||||
};
|
};
|
||||||
@ -65,9 +68,6 @@ lazy_static::lazy_static! {
|
|||||||
vec![
|
vec![
|
||||||
SemanticTokenModifier::DECLARATION,
|
SemanticTokenModifier::DECLARATION,
|
||||||
SemanticTokenModifier::DEFINITION,
|
SemanticTokenModifier::DEFINITION,
|
||||||
SemanticTokenModifier::DEFAULT_LIBRARY,
|
|
||||||
SemanticTokenModifier::READONLY,
|
|
||||||
SemanticTokenModifier::STATIC,
|
|
||||||
]
|
]
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -93,25 +93,25 @@ pub struct Backend {
|
|||||||
/// The file system client to use.
|
/// The file system client to use.
|
||||||
pub fs: Arc<crate::fs::FileManager>,
|
pub fs: Arc<crate::fs::FileManager>,
|
||||||
/// The workspace folders.
|
/// The workspace folders.
|
||||||
pub workspace_folders: DashMap<String, WorkspaceFolder>,
|
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
|
||||||
/// The stdlib completions for the language.
|
/// The stdlib completions for the language.
|
||||||
pub stdlib_completions: HashMap<String, CompletionItem>,
|
pub stdlib_completions: HashMap<String, CompletionItem>,
|
||||||
/// The stdlib signatures for the language.
|
/// The stdlib signatures for the language.
|
||||||
pub stdlib_signatures: HashMap<String, SignatureHelp>,
|
pub stdlib_signatures: HashMap<String, SignatureHelp>,
|
||||||
/// Token maps.
|
/// Token maps.
|
||||||
pub token_map: DashMap<String, Vec<crate::token::Token>>,
|
pub token_map: SafeMap<String, Vec<crate::token::Token>>,
|
||||||
/// AST maps.
|
/// AST maps.
|
||||||
pub ast_map: DashMap<String, crate::ast::types::Program>,
|
pub ast_map: SafeMap<String, crate::ast::types::Program>,
|
||||||
/// Memory maps.
|
/// Memory maps.
|
||||||
pub memory_map: DashMap<String, crate::executor::ProgramMemory>,
|
pub memory_map: SafeMap<String, crate::executor::ProgramMemory>,
|
||||||
/// Current code.
|
/// Current code.
|
||||||
pub code_map: DashMap<String, Vec<u8>>,
|
pub code_map: SafeMap<String, Vec<u8>>,
|
||||||
/// Diagnostics.
|
/// Diagnostics.
|
||||||
pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
|
pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,
|
||||||
/// Symbols map.
|
/// Symbols map.
|
||||||
pub symbols_map: DashMap<String, Vec<DocumentSymbol>>,
|
pub symbols_map: SafeMap<String, Vec<DocumentSymbol>>,
|
||||||
/// Semantic tokens map.
|
/// Semantic tokens map.
|
||||||
pub semantic_tokens_map: DashMap<String, Vec<SemanticToken>>,
|
pub semantic_tokens_map: SafeMap<String, Vec<SemanticToken>>,
|
||||||
/// The Zoo API client.
|
/// The Zoo API client.
|
||||||
pub zoo_client: kittycad::Client,
|
pub zoo_client: kittycad::Client,
|
||||||
/// If we can send telemetry for this user.
|
/// If we can send telemetry for this user.
|
||||||
@ -122,17 +122,18 @@ pub struct Backend {
|
|||||||
pub can_execute: Arc<RwLock<bool>>,
|
pub can_execute: Arc<RwLock<bool>>,
|
||||||
|
|
||||||
pub is_initialized: Arc<RwLock<bool>>,
|
pub is_initialized: Arc<RwLock<bool>>,
|
||||||
|
pub current_handle: UpdateHandle,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Implement the shared backend trait for the language server.
|
// Implement the shared backend trait for the language server.
|
||||||
#[async_trait::async_trait]
|
#[async_trait::async_trait]
|
||||||
impl crate::lsp::backend::Backend for Backend {
|
impl crate::lsp::backend::Backend for Backend {
|
||||||
fn client(&self) -> &Client {
|
fn client(&self) -> Client {
|
||||||
&self.client
|
self.client.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fs(&self) -> &Arc<crate::fs::FileManager> {
|
fn fs(&self) -> Arc<crate::fs::FileManager> {
|
||||||
&self.fs
|
self.fs.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn is_initialized(&self) -> bool {
|
async fn is_initialized(&self) -> bool {
|
||||||
@ -143,76 +144,84 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
*self.is_initialized.write().await = is_initialized;
|
*self.is_initialized.write().await = is_initialized;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn current_handle(&self) -> Option<InnerHandle> {
|
||||||
|
self.current_handle.read().await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
|
||||||
|
self.current_handle.write(handle).await;
|
||||||
|
}
|
||||||
|
|
||||||
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
|
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
|
||||||
// TODO: fix clone
|
self.workspace_folders.inner().await.values().cloned().collect()
|
||||||
self.workspace_folders.iter().map(|i| i.clone()).collect()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||||
for folder in folders {
|
for folder in folders {
|
||||||
self.workspace_folders.insert(folder.name.to_string(), folder);
|
self.workspace_folders.insert(folder.name.to_string(), folder).await;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||||
for folder in folders {
|
for folder in folders {
|
||||||
self.workspace_folders.remove(&folder.name);
|
self.workspace_folders.remove(&folder.name).await;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn code_map(&self) -> &DashMap<String, Vec<u8>> {
|
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
|
||||||
&self.code_map
|
self.code_map.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
|
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
|
||||||
self.code_map.insert(uri, text);
|
self.code_map.insert(uri, text).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
|
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
|
||||||
self.code_map.remove(&uri).map(|x| x.1)
|
self.code_map.remove(&uri).await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn clear_code_state(&self) {
|
async fn clear_code_state(&self) {
|
||||||
self.code_map.clear();
|
self.code_map.clear().await;
|
||||||
self.token_map.clear();
|
self.token_map.clear().await;
|
||||||
self.ast_map.clear();
|
self.ast_map.clear().await;
|
||||||
self.diagnostics_map.clear();
|
self.diagnostics_map.clear().await;
|
||||||
self.symbols_map.clear();
|
self.symbols_map.clear().await;
|
||||||
self.semantic_tokens_map.clear();
|
self.semantic_tokens_map.clear().await;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
|
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
|
||||||
&self.diagnostics_map
|
self.diagnostics_map.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
|
async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
|
||||||
let filename = params.uri.to_string();
|
|
||||||
// We already updated the code map in the shared backend.
|
// We already updated the code map in the shared backend.
|
||||||
|
|
||||||
// Lets update the tokens.
|
// Lets update the tokens.
|
||||||
let tokens = match crate::token::lexer(¶ms.text) {
|
let tokens = match crate::token::lexer(¶ms.text) {
|
||||||
Ok(tokens) => tokens,
|
Ok(tokens) => tokens,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
self.add_to_diagnostics(¶ms, &[err], true).await;
|
self.add_to_diagnostics(¶ms, err, true).await;
|
||||||
self.token_map.remove(&filename);
|
self.token_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.ast_map.remove(&filename);
|
self.ast_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.symbols_map.remove(&filename);
|
self.symbols_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.semantic_tokens_map.remove(&filename);
|
self.semantic_tokens_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.memory_map.remove(&filename);
|
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Get the previous tokens.
|
||||||
|
let previous_tokens = self.token_map.get(¶ms.uri.to_string()).await;
|
||||||
|
|
||||||
// Try to get the memory for the current code.
|
// Try to get the memory for the current code.
|
||||||
let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
|
let has_memory = if let Some(memory) = self.memory_map.get(¶ms.uri.to_string()).await {
|
||||||
*memory != crate::executor::ProgramMemory::default()
|
memory != crate::executor::ProgramMemory::default()
|
||||||
} else {
|
} else {
|
||||||
false
|
false
|
||||||
};
|
};
|
||||||
|
|
||||||
// Get the previous tokens.
|
let tokens_changed = if let Some(previous_tokens) = previous_tokens.clone() {
|
||||||
let tokens_changed = if let Some(previous_tokens) = self.token_map.get(&filename) {
|
previous_tokens != tokens
|
||||||
*previous_tokens != tokens
|
|
||||||
} else {
|
} else {
|
||||||
true
|
true
|
||||||
};
|
};
|
||||||
@ -225,9 +234,9 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
|
|
||||||
if tokens_changed {
|
if tokens_changed {
|
||||||
// Update our token map.
|
// Update our token map.
|
||||||
self.token_map.insert(params.uri.to_string(), tokens.clone());
|
self.token_map.insert(params.uri.to_string(), tokens.clone()).await;
|
||||||
// Update our semantic tokens.
|
// Update our semantic tokens.
|
||||||
self.update_semantic_tokens(&tokens, ¶ms).await;
|
self.update_semantic_tokens(tokens.clone(), ¶ms).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Lets update the ast.
|
// Lets update the ast.
|
||||||
@ -236,19 +245,19 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
let ast = match result {
|
let ast = match result {
|
||||||
Ok(ast) => ast,
|
Ok(ast) => ast,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
self.add_to_diagnostics(¶ms, &[err], true).await;
|
self.add_to_diagnostics(¶ms, err, true).await;
|
||||||
self.ast_map.remove(&filename);
|
self.ast_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.symbols_map.remove(&filename);
|
self.symbols_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.memory_map.remove(&filename);
|
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// Check if the ast changed.
|
// Check if the ast changed.
|
||||||
let ast_changed = match self.ast_map.get(&filename) {
|
let ast_changed = match self.ast_map.get(¶ms.uri.to_string()).await {
|
||||||
Some(old_ast) => {
|
Some(old_ast) => {
|
||||||
// Check if the ast changed.
|
// Check if the ast changed.
|
||||||
*old_ast != ast
|
old_ast != ast
|
||||||
}
|
}
|
||||||
None => true,
|
None => true,
|
||||||
};
|
};
|
||||||
@ -259,15 +268,17 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if ast_changed {
|
if ast_changed {
|
||||||
self.ast_map.insert(params.uri.to_string(), ast.clone());
|
self.ast_map.insert(params.uri.to_string(), ast.clone()).await;
|
||||||
// Update the symbols map.
|
// Update the symbols map.
|
||||||
self.symbols_map.insert(
|
self.symbols_map
|
||||||
params.uri.to_string(),
|
.insert(
|
||||||
ast.get_lsp_symbols(¶ms.text).unwrap_or_default(),
|
params.uri.to_string(),
|
||||||
);
|
ast.get_lsp_symbols(¶ms.text).unwrap_or_default(),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
// Update our semantic tokens.
|
// Update our semantic tokens.
|
||||||
self.update_semantic_tokens(&tokens, ¶ms).await;
|
self.update_semantic_tokens(tokens, ¶ms).await;
|
||||||
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
#[cfg(not(target_arch = "wasm32"))]
|
||||||
{
|
{
|
||||||
@ -276,7 +287,12 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
.into_iter()
|
.into_iter()
|
||||||
.flatten()
|
.flatten()
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
self.add_to_diagnostics(¶ms, &discovered_findings, false).await;
|
// Clear the lints before we lint.
|
||||||
|
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::INFORMATION))
|
||||||
|
.await;
|
||||||
|
for discovered_finding in &discovered_findings {
|
||||||
|
self.add_to_diagnostics(¶ms, discovered_finding, false).await;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -292,7 +308,7 @@ impl crate::lsp::backend::Backend for Backend {
|
|||||||
// Execute the code if we have an executor context.
|
// Execute the code if we have an executor context.
|
||||||
// This function automatically executes if we should & updates the diagnostics if we got
|
// This function automatically executes if we should & updates the diagnostics if we got
|
||||||
// errors.
|
// errors.
|
||||||
if self.execute(¶ms, &ast).await.is_err() {
|
if self.execute(¶ms, ast.clone()).await.is_err() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -307,22 +323,35 @@ impl Backend {
|
|||||||
*self.can_execute.read().await
|
*self.can_execute.read().await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn executor_ctx(&self) -> tokio::sync::RwLockReadGuard<'_, Option<crate::executor::ExecutorContext>> {
|
async fn set_can_execute(&self, can_execute: bool) {
|
||||||
self.executor_ctx.read().await
|
*self.can_execute.write().await = can_execute;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn update_semantic_tokens(&self, tokens: &[crate::token::Token], params: &TextDocumentItem) {
|
pub async fn executor_ctx(&self) -> Option<crate::executor::ExecutorContext> {
|
||||||
|
self.executor_ctx.read().await.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn set_executor_ctx(&self, executor_ctx: crate::executor::ExecutorContext) {
|
||||||
|
*self.executor_ctx.write().await = Some(executor_ctx);
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn update_semantic_tokens(&self, tokens: Vec<crate::token::Token>, params: &TextDocumentItem) {
|
||||||
// Update the semantic tokens map.
|
// Update the semantic tokens map.
|
||||||
let mut semantic_tokens = vec![];
|
let mut semantic_tokens = vec![];
|
||||||
let mut last_position = Position::new(0, 0);
|
let mut last_position = Position::new(0, 0);
|
||||||
for token in tokens {
|
for token in &tokens {
|
||||||
let Ok(token_type) = SemanticTokenType::try_from(token.token_type) else {
|
let Ok(mut token_type) = SemanticTokenType::try_from(token.token_type) else {
|
||||||
// We continue here because not all tokens can be converted this way, we will get
|
// We continue here because not all tokens can be converted this way, we will get
|
||||||
// the rest from the ast.
|
// the rest from the ast.
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut token_type_index = match self.get_semantic_token_type_index(&token_type) {
|
if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
|
||||||
|
// This is a stdlib function.
|
||||||
|
token_type = SemanticTokenType::FUNCTION;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
|
||||||
Some(index) => index,
|
Some(index) => index,
|
||||||
// This is actually bad this should not fail.
|
// This is actually bad this should not fail.
|
||||||
// The test for listing all semantic token types should make this never happen.
|
// The test for listing all semantic token types should make this never happen.
|
||||||
@ -337,21 +366,21 @@ impl Backend {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let source_range: SourceRange = token.into();
|
let source_range: SourceRange = token.clone().into();
|
||||||
let position = source_range.start_to_lsp_position(¶ms.text);
|
let position = source_range.start_to_lsp_position(¶ms.text);
|
||||||
|
|
||||||
// Calculate the token modifiers.
|
// Calculate the token modifiers.
|
||||||
// Get the value at the current position.
|
// Get the value at the current position.
|
||||||
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(params.uri.as_str()) {
|
let token_modifiers_bitset: u32 = if let Some(ast) = self.ast_map.get(¶ms.uri.to_string()).await {
|
||||||
let token_index = Arc::new(Mutex::new(token_type_index));
|
let token_index = Arc::new(Mutex::new(token_type_index));
|
||||||
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
||||||
crate::walk::walk(&ast, &|node: crate::walk::Node| {
|
crate::lint::walk(&ast, &|node: crate::lint::Node| {
|
||||||
let node_range: SourceRange = (&node).into();
|
let node_range: SourceRange = (&node).into();
|
||||||
if !node_range.contains(source_range.start()) {
|
if !node_range.contains(source_range.start()) {
|
||||||
return Ok(true);
|
return Ok(true);
|
||||||
}
|
}
|
||||||
|
|
||||||
let get_modifier = |modifier: Vec<SemanticTokenModifier>| -> Result<bool> {
|
let get_modifier = |modifier: SemanticTokenModifier| -> Result<bool> {
|
||||||
let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
|
let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
|
||||||
return Ok(true);
|
return Ok(true);
|
||||||
@ -365,73 +394,61 @@ impl Backend {
|
|||||||
};
|
};
|
||||||
|
|
||||||
match node {
|
match node {
|
||||||
crate::walk::Node::TagDeclarator(_) => {
|
crate::lint::Node::TagDeclarator(_) => {
|
||||||
return get_modifier(vec![
|
return get_modifier(SemanticTokenModifier::DEFINITION);
|
||||||
SemanticTokenModifier::DEFINITION,
|
|
||||||
SemanticTokenModifier::STATIC,
|
|
||||||
]);
|
|
||||||
}
|
}
|
||||||
crate::walk::Node::VariableDeclarator(variable) => {
|
crate::lint::Node::VariableDeclarator(variable) => {
|
||||||
let sr: SourceRange = (&variable.id).into();
|
let sr: SourceRange = variable.id.clone().into();
|
||||||
if sr.contains(source_range.start()) {
|
if sr.contains(source_range.start()) {
|
||||||
if let Value::FunctionExpression(_) = &variable.init {
|
if let Value::FunctionExpression(_) = &variable.init {
|
||||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
|
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
|
||||||
Some(index) => index,
|
Some(index) => index,
|
||||||
None => token_type_index,
|
None => token_type_index,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
return get_modifier(vec![
|
return get_modifier(SemanticTokenModifier::DECLARATION);
|
||||||
SemanticTokenModifier::DECLARATION,
|
|
||||||
SemanticTokenModifier::READONLY,
|
|
||||||
]);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
crate::walk::Node::Parameter(_) => {
|
crate::lint::Node::Parameter(_) => {
|
||||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PARAMETER) {
|
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PARAMETER) {
|
||||||
Some(index) => index,
|
Some(index) => index,
|
||||||
None => token_type_index,
|
None => token_type_index,
|
||||||
};
|
};
|
||||||
return Ok(false);
|
return Ok(false);
|
||||||
}
|
}
|
||||||
crate::walk::Node::MemberExpression(member_expression) => {
|
crate::lint::Node::MemberExpression(member_expression) => {
|
||||||
let sr: SourceRange = (&member_expression.property).into();
|
let sr: SourceRange = member_expression.property.clone().into();
|
||||||
if sr.contains(source_range.start()) {
|
if sr.contains(source_range.start()) {
|
||||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
|
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
|
||||||
Some(index) => index,
|
Some(index) => index,
|
||||||
None => token_type_index,
|
None => token_type_index,
|
||||||
};
|
};
|
||||||
return Ok(false);
|
return Ok(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
crate::walk::Node::ObjectProperty(object_property) => {
|
crate::lint::Node::ObjectProperty(object_property) => {
|
||||||
let sr: SourceRange = (&object_property.key).into();
|
let sr: SourceRange = object_property.key.clone().into();
|
||||||
if sr.contains(source_range.start()) {
|
if sr.contains(source_range.start()) {
|
||||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
|
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
|
||||||
Some(index) => index,
|
Some(index) => index,
|
||||||
None => token_type_index,
|
None => token_type_index,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
return get_modifier(vec![SemanticTokenModifier::DECLARATION]);
|
return get_modifier(SemanticTokenModifier::DECLARATION);
|
||||||
}
|
}
|
||||||
crate::walk::Node::CallExpression(call_expr) => {
|
crate::lint::Node::CallExpression(call_expr) => {
|
||||||
let sr: SourceRange = (&call_expr.callee).into();
|
let sr: SourceRange = call_expr.callee.clone().into();
|
||||||
if sr.contains(source_range.start()) {
|
if sr.contains(source_range.start()) {
|
||||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
|
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
|
||||||
Some(index) => index,
|
Some(index) => index,
|
||||||
None => token_type_index,
|
None => token_type_index,
|
||||||
};
|
};
|
||||||
|
|
||||||
if self.stdlib_completions.contains_key(&call_expr.callee.name) {
|
|
||||||
// This is a stdlib function.
|
|
||||||
return get_modifier(vec![SemanticTokenModifier::DEFAULT_LIBRARY]);
|
|
||||||
}
|
|
||||||
|
|
||||||
return Ok(false);
|
return Ok(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -492,12 +509,15 @@ impl Backend {
|
|||||||
|
|
||||||
last_position = position;
|
last_position = position;
|
||||||
}
|
}
|
||||||
self.semantic_tokens_map.insert(params.uri.to_string(), semantic_tokens);
|
self.semantic_tokens_map
|
||||||
|
.insert(params.uri.to_string(), semantic_tokens)
|
||||||
|
.await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
|
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
|
||||||
let Some(mut items) = self.diagnostics_map.get_mut(uri.as_str()) else {
|
let mut items = match self.diagnostics_map.get(uri.as_str()).await {
|
||||||
return;
|
Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items.clone(),
|
||||||
|
_ => vec![],
|
||||||
};
|
};
|
||||||
|
|
||||||
// If we only want to clear a specific severity, do that.
|
// If we only want to clear a specific severity, do that.
|
||||||
@ -507,83 +527,94 @@ impl Backend {
|
|||||||
items.clear();
|
items.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
if items.is_empty() {
|
self.diagnostics_map
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
.insert(
|
||||||
{
|
uri.to_string(),
|
||||||
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
|
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
|
||||||
}
|
related_documents: None,
|
||||||
|
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||||
|
result_id: None,
|
||||||
|
items: items.clone(),
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
// We need to drop the items here.
|
#[cfg(not(target_arch = "wasm32"))]
|
||||||
drop(items);
|
{
|
||||||
|
self.client.publish_diagnostics(uri.clone(), items, None).await;
|
||||||
self.diagnostics_map.remove(uri.as_str());
|
|
||||||
} else {
|
|
||||||
// We don't need to update the map since we used get_mut.
|
|
||||||
|
|
||||||
#[cfg(not(target_arch = "wasm32"))]
|
|
||||||
{
|
|
||||||
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn add_to_diagnostics<DiagT: IntoDiagnostic + std::fmt::Debug>(
|
async fn add_to_diagnostics<DiagT: IntoDiagnostic + std::fmt::Debug>(
|
||||||
&self,
|
&self,
|
||||||
params: &TextDocumentItem,
|
params: &TextDocumentItem,
|
||||||
diagnostics: &[DiagT],
|
diagnostic: DiagT,
|
||||||
clear_all_before_add: bool,
|
clear_all_before_add: bool,
|
||||||
) {
|
) {
|
||||||
self.client
|
self.client
|
||||||
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostics))
|
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostic))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
|
let diagnostic = diagnostic.to_lsp_diagnostic(¶ms.text);
|
||||||
|
|
||||||
if clear_all_before_add {
|
if clear_all_before_add {
|
||||||
self.clear_diagnostics_map(¶ms.uri, None).await;
|
self.clear_diagnostics_map(¶ms.uri, None).await;
|
||||||
} else if diagnostics.iter().all(|x| x.severity() == DiagnosticSeverity::ERROR) {
|
} else if diagnostic.severity == Some(DiagnosticSeverity::ERROR) {
|
||||||
// If the diagnostic is an error, it will be the only error we get since that halts
|
// If the diagnostic is an error, it will be the only error we get since that halts
|
||||||
// execution.
|
// execution.
|
||||||
// Clear the diagnostics before we add a new one.
|
// Clear the diagnostics before we add a new one.
|
||||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
||||||
.await;
|
.await;
|
||||||
} else if diagnostics
|
|
||||||
.iter()
|
|
||||||
.all(|x| x.severity() == DiagnosticSeverity::INFORMATION)
|
|
||||||
{
|
|
||||||
// If the diagnostic is a lint, we will pass them all to add at once so we need to
|
|
||||||
// clear the old ones.
|
|
||||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::INFORMATION))
|
|
||||||
.await;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut items = if let Some(items) = self.diagnostics_map.get(params.uri.as_str()) {
|
let DocumentDiagnosticReport::Full(mut report) = self
|
||||||
// TODO: Would be awesome to fix the clone here.
|
.diagnostics_map
|
||||||
items.clone()
|
.get(params.uri.clone().as_str())
|
||||||
} else {
|
.await
|
||||||
vec![]
|
.unwrap_or(DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
|
||||||
|
related_documents: None,
|
||||||
|
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||||
|
result_id: None,
|
||||||
|
items: vec![],
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
else {
|
||||||
|
unreachable!();
|
||||||
};
|
};
|
||||||
|
|
||||||
for diagnostic in diagnostics {
|
// Ensure we don't already have this diagnostic.
|
||||||
let d = diagnostic.to_lsp_diagnostic(¶ms.text);
|
if report
|
||||||
// Make sure we don't duplicate diagnostics.
|
.full_document_diagnostic_report
|
||||||
if !items.iter().any(|x| x == &d) {
|
.items
|
||||||
items.push(d);
|
.iter()
|
||||||
}
|
.any(|x| x == &diagnostic)
|
||||||
|
{
|
||||||
|
self.client
|
||||||
|
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
||||||
|
.await;
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
self.diagnostics_map.insert(params.uri.to_string(), items.clone());
|
report.full_document_diagnostic_report.items.push(diagnostic);
|
||||||
|
|
||||||
self.client.publish_diagnostics(params.uri.clone(), items, None).await;
|
self.diagnostics_map
|
||||||
|
.insert(params.uri.to_string(), DocumentDiagnosticReport::Full(report.clone()))
|
||||||
|
.await;
|
||||||
|
|
||||||
|
self.client
|
||||||
|
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
||||||
|
.await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn execute(&self, params: &TextDocumentItem, ast: &crate::ast::types::Program) -> Result<()> {
|
async fn execute(&self, params: &TextDocumentItem, ast: crate::ast::types::Program) -> Result<()> {
|
||||||
// Check if we can execute.
|
// Check if we can execute.
|
||||||
if !self.can_execute().await {
|
if !self.can_execute().await {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Execute the code if we have an executor context.
|
// Execute the code if we have an executor context.
|
||||||
let ctx = self.executor_ctx().await;
|
let Some(executor_ctx) = self.executor_ctx().await else {
|
||||||
let Some(ref executor_ctx) = *ctx else {
|
|
||||||
return Ok(());
|
return Ok(());
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -598,16 +629,17 @@ impl Backend {
|
|||||||
let memory = match executor_ctx.run(ast, None).await {
|
let memory = match executor_ctx.run(ast, None).await {
|
||||||
Ok(memory) => memory,
|
Ok(memory) => memory,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
self.memory_map.remove(params.uri.as_str());
|
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||||
self.add_to_diagnostics(params, &[err], false).await;
|
self.add_to_diagnostics(params, err, false).await;
|
||||||
|
|
||||||
// Since we already published the diagnostics we don't really care about the error
|
// Since we already published the diagnostics we don't really care about the error
|
||||||
// string.
|
// string.
|
||||||
return Err(anyhow::anyhow!("failed to execute code"));
|
return Err(anyhow::anyhow!("failed to execute code"));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
drop(executor_ctx);
|
||||||
|
|
||||||
self.memory_map.insert(params.uri.to_string(), memory.clone());
|
self.memory_map.insert(params.uri.to_string(), memory.clone()).await;
|
||||||
|
|
||||||
// Send the notification to the client that the memory was updated.
|
// Send the notification to the client that the memory was updated.
|
||||||
self.client
|
self.client
|
||||||
@ -617,36 +649,22 @@ impl Backend {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_semantic_token_type_index(&self, token_type: &SemanticTokenType) -> Option<u32> {
|
pub fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<u32> {
|
||||||
SEMANTIC_TOKEN_TYPES
|
SEMANTIC_TOKEN_TYPES
|
||||||
.iter()
|
.iter()
|
||||||
.position(|x| *x == *token_type)
|
.position(|x| *x == token_type)
|
||||||
.map(|y| y as u32)
|
.map(|y| y as u32)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_semantic_token_modifier_index(&self, token_types: Vec<SemanticTokenModifier>) -> Option<u32> {
|
pub fn get_semantic_token_modifier_index(&self, token_type: SemanticTokenModifier) -> Option<u32> {
|
||||||
if token_types.is_empty() {
|
SEMANTIC_TOKEN_MODIFIERS
|
||||||
return None;
|
.iter()
|
||||||
}
|
.position(|x| *x == token_type)
|
||||||
|
.map(|y| y as u32)
|
||||||
let mut modifier = None;
|
|
||||||
for token_type in token_types {
|
|
||||||
if let Some(index) = SEMANTIC_TOKEN_MODIFIERS
|
|
||||||
.iter()
|
|
||||||
.position(|x| *x == token_type)
|
|
||||||
.map(|y| y as u32)
|
|
||||||
{
|
|
||||||
modifier = match modifier {
|
|
||||||
Some(modifier) => Some(modifier | index),
|
|
||||||
None => Some(index),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
modifier
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
|
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
|
||||||
let ast = match self.ast_map.get(file_name) {
|
let ast = match self.ast_map.get(file_name).await {
|
||||||
Some(ast) => ast,
|
Some(ast) => ast,
|
||||||
None => return vec![],
|
None => return vec![],
|
||||||
};
|
};
|
||||||
@ -663,9 +681,7 @@ impl Backend {
|
|||||||
// Collect all the file data we know.
|
// Collect all the file data we know.
|
||||||
let mut buf = vec![];
|
let mut buf = vec![];
|
||||||
let mut zip = zip::ZipWriter::new(std::io::Cursor::new(&mut buf));
|
let mut zip = zip::ZipWriter::new(std::io::Cursor::new(&mut buf));
|
||||||
for code in self.code_map.iter() {
|
for (entry, value) in self.code_map.inner().await.iter() {
|
||||||
let entry = code.key();
|
|
||||||
let value = code.value();
|
|
||||||
let file_name = entry.replace("file://", "").to_string();
|
let file_name = entry.replace("file://", "").to_string();
|
||||||
|
|
||||||
let options = zip::write::SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
|
let options = zip::write::SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
|
||||||
@ -701,7 +717,7 @@ impl Backend {
|
|||||||
// Get the workspace folders.
|
// Get the workspace folders.
|
||||||
// The key of the workspace folder is the project name.
|
// The key of the workspace folder is the project name.
|
||||||
let workspace_folders = self.workspace_folders().await;
|
let workspace_folders = self.workspace_folders().await;
|
||||||
let project_names: Vec<&str> = workspace_folders.iter().map(|v| v.name.as_str()).collect::<Vec<_>>();
|
let project_names: Vec<String> = workspace_folders.iter().map(|v| v.name.clone()).collect::<Vec<_>>();
|
||||||
// Get the first name.
|
// Get the first name.
|
||||||
let project_name = project_names
|
let project_name = project_names
|
||||||
.first()
|
.first()
|
||||||
@ -748,9 +764,7 @@ impl Backend {
|
|||||||
let filename = params.text_document.uri.to_string();
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
{
|
{
|
||||||
let mut ctx = self.executor_ctx.write().await;
|
let Some(mut executor_ctx) = self.executor_ctx().await else {
|
||||||
// Borrow the executor context mutably.
|
|
||||||
let Some(ref mut executor_ctx) = *ctx else {
|
|
||||||
self.client
|
self.client
|
||||||
.log_message(MessageType::ERROR, "no executor context set to update units for")
|
.log_message(MessageType::ERROR, "no executor context set to update units for")
|
||||||
.await;
|
.await;
|
||||||
@ -762,8 +776,8 @@ impl Backend {
|
|||||||
.await;
|
.await;
|
||||||
|
|
||||||
// Try to get the memory for the current code.
|
// Try to get the memory for the current code.
|
||||||
let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
|
let has_memory = if let Some(memory) = self.memory_map.get(&filename).await {
|
||||||
*memory != crate::executor::ProgramMemory::default()
|
memory != crate::executor::ProgramMemory::default()
|
||||||
} else {
|
} else {
|
||||||
false
|
false
|
||||||
};
|
};
|
||||||
@ -778,6 +792,10 @@ impl Backend {
|
|||||||
|
|
||||||
// Set the engine units.
|
// Set the engine units.
|
||||||
executor_ctx.update_units(params.units);
|
executor_ctx.update_units(params.units);
|
||||||
|
|
||||||
|
// Update the locked executor context.
|
||||||
|
self.set_executor_ctx(executor_ctx.clone()).await;
|
||||||
|
drop(executor_ctx);
|
||||||
}
|
}
|
||||||
// Lock is dropped here since nested.
|
// Lock is dropped here since nested.
|
||||||
// This is IMPORTANT.
|
// This is IMPORTANT.
|
||||||
@ -805,13 +823,20 @@ impl Backend {
|
|||||||
&self,
|
&self,
|
||||||
params: custom_notifications::UpdateCanExecuteParams,
|
params: custom_notifications::UpdateCanExecuteParams,
|
||||||
) -> RpcResult<custom_notifications::UpdateCanExecuteResponse> {
|
) -> RpcResult<custom_notifications::UpdateCanExecuteResponse> {
|
||||||
let mut can_execute = self.can_execute.write().await;
|
let can_execute = self.can_execute().await;
|
||||||
|
|
||||||
if *can_execute == params.can_execute {
|
if can_execute == params.can_execute {
|
||||||
return Ok(custom_notifications::UpdateCanExecuteResponse {});
|
return Ok(custom_notifications::UpdateCanExecuteResponse {});
|
||||||
}
|
}
|
||||||
|
|
||||||
*can_execute = params.can_execute;
|
if !params.can_execute {
|
||||||
|
// Kill any in progress executions.
|
||||||
|
if let Some(current_handle) = self.current_handle().await {
|
||||||
|
current_handle.cancel();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.set_can_execute(params.can_execute).await;
|
||||||
|
|
||||||
Ok(custom_notifications::UpdateCanExecuteResponse {})
|
Ok(custom_notifications::UpdateCanExecuteResponse {})
|
||||||
}
|
}
|
||||||
@ -924,7 +949,7 @@ impl LanguageServer for Backend {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
||||||
self.do_did_change(params).await;
|
self.do_did_change(params.clone()).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
||||||
@ -963,7 +988,7 @@ impl LanguageServer for Backend {
|
|||||||
async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
|
async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
|
||||||
let filename = params.text_document_position_params.text_document.uri.to_string();
|
let filename = params.text_document_position_params.text_document.uri.to_string();
|
||||||
|
|
||||||
let Some(current_code) = self.code_map.get(&filename) else {
|
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||||
@ -973,7 +998,7 @@ impl LanguageServer for Backend {
|
|||||||
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
|
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
|
||||||
|
|
||||||
// Let's iterate over the AST and find the node that contains the cursor.
|
// Let's iterate over the AST and find the node that contains the cursor.
|
||||||
let Some(ast) = self.ast_map.get(&filename) else {
|
let Some(ast) = self.ast_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -1006,11 +1031,7 @@ impl LanguageServer for Backend {
|
|||||||
value: format!(
|
value: format!(
|
||||||
"```{}{}```\n{}",
|
"```{}{}```\n{}",
|
||||||
name,
|
name,
|
||||||
if let Some(detail) = &label_details.detail {
|
label_details.detail.clone().unwrap_or_default(),
|
||||||
detail
|
|
||||||
} else {
|
|
||||||
""
|
|
||||||
},
|
|
||||||
docs
|
docs
|
||||||
),
|
),
|
||||||
}),
|
}),
|
||||||
@ -1069,7 +1090,7 @@ impl LanguageServer for Backend {
|
|||||||
let filename = params.text_document.uri.to_string();
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
// Get the current diagnostics for this file.
|
// Get the current diagnostics for this file.
|
||||||
let Some(items) = self.diagnostics_map.get(&filename) else {
|
let Some(diagnostic) = self.diagnostics_map.get(&filename).await else {
|
||||||
// Send an empty report.
|
// Send an empty report.
|
||||||
return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
|
return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
|
||||||
RelatedFullDocumentDiagnosticReport {
|
RelatedFullDocumentDiagnosticReport {
|
||||||
@ -1082,21 +1103,13 @@ impl LanguageServer for Backend {
|
|||||||
)));
|
)));
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
|
Ok(DocumentDiagnosticReportResult::Report(diagnostic.clone()))
|
||||||
RelatedFullDocumentDiagnosticReport {
|
|
||||||
related_documents: None,
|
|
||||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
|
||||||
result_id: None,
|
|
||||||
items: items.clone(),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
|
async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
|
||||||
let filename = params.text_document_position_params.text_document.uri.to_string();
|
let filename = params.text_document_position_params.text_document.uri.to_string();
|
||||||
|
|
||||||
let Some(current_code) = self.code_map.get(&filename) else {
|
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||||
@ -1106,7 +1119,7 @@ impl LanguageServer for Backend {
|
|||||||
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
|
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
|
||||||
|
|
||||||
// Let's iterate over the AST and find the node that contains the cursor.
|
// Let's iterate over the AST and find the node that contains the cursor.
|
||||||
let Some(ast) = self.ast_map.get(&filename) else {
|
let Some(ast) = self.ast_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -1140,7 +1153,7 @@ impl LanguageServer for Backend {
|
|||||||
|
|
||||||
signature.active_parameter = Some(parameter_index);
|
signature.active_parameter = Some(parameter_index);
|
||||||
|
|
||||||
Ok(Some(signature))
|
Ok(Some(signature.clone()))
|
||||||
}
|
}
|
||||||
crate::ast::types::Hover::Comment { value: _, range: _ } => {
|
crate::ast::types::Hover::Comment { value: _, range: _ } => {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
@ -1157,7 +1170,7 @@ impl LanguageServer for Backend {
|
|||||||
async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
|
async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
|
||||||
let filename = params.text_document.uri.to_string();
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename) else {
|
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -1170,7 +1183,7 @@ impl LanguageServer for Backend {
|
|||||||
async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
|
async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
|
||||||
let filename = params.text_document.uri.to_string();
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
let Some(symbols) = self.symbols_map.get(&filename) else {
|
let Some(symbols) = self.symbols_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -1180,7 +1193,7 @@ impl LanguageServer for Backend {
|
|||||||
async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
|
async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
|
||||||
let filename = params.text_document.uri.to_string();
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
let Some(current_code) = self.code_map.get(&filename) else {
|
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||||
@ -1217,7 +1230,7 @@ impl LanguageServer for Backend {
|
|||||||
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
|
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
|
||||||
let filename = params.text_document_position.text_document.uri.to_string();
|
let filename = params.text_document_position.text_document.uri.to_string();
|
||||||
|
|
||||||
let Some(current_code) = self.code_map.get(&filename) else {
|
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||||
@ -1260,7 +1273,7 @@ impl LanguageServer for Backend {
|
|||||||
let filename = params.text_document.uri.to_string();
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
// Get the ast.
|
// Get the ast.
|
||||||
let Some(ast) = self.ast_map.get(&filename) else {
|
let Some(ast) = self.ast_map.get(&filename).await else {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -3,8 +3,7 @@
|
|||||||
pub mod backend;
|
pub mod backend;
|
||||||
pub mod copilot;
|
pub mod copilot;
|
||||||
pub mod kcl;
|
pub mod kcl;
|
||||||
#[cfg(any(test, feature = "lsp-test-util"))]
|
mod safemap;
|
||||||
pub mod test_util;
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests;
|
mod tests;
|
||||||
pub mod util;
|
pub mod util;
|
||||||
|
60
src/wasm-lib/kcl/src/lsp/safemap.rs
Normal file
60
src/wasm-lib/kcl/src/lsp/safemap.rs
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
//! A map type that is safe to use in a concurrent environment.
|
||||||
|
//! But also in wasm.
|
||||||
|
//! Previously, we used `dashmap::DashMap` for this purpose, but it doesn't work in wasm.
|
||||||
|
|
||||||
|
use std::{borrow::Borrow, collections::HashMap, hash::Hash, sync::Arc};
|
||||||
|
|
||||||
|
use tokio::sync::RwLock;
|
||||||
|
|
||||||
|
/// A thread-safe map type.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct SafeMap<K: Eq + Hash + Clone, V: Clone>(Arc<RwLock<HashMap<K, V>>>);
|
||||||
|
|
||||||
|
impl<K: Eq + Hash + Clone, V: Clone> SafeMap<K, V> {
|
||||||
|
/// Create a new empty map.
|
||||||
|
pub fn new() -> Self {
|
||||||
|
SafeMap(Arc::new(RwLock::new(HashMap::new())))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn len(&self) -> usize {
|
||||||
|
self.0.read().await.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn is_empty(&self) -> bool {
|
||||||
|
self.0.read().await.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn clear(&self) {
|
||||||
|
self.0.write().await.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Insert a key-value pair into the map.
|
||||||
|
pub async fn insert(&self, key: K, value: V) {
|
||||||
|
self.0.write().await.insert(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a reference to the value associated with the given key.
|
||||||
|
pub async fn get<Q>(&self, key: &Q) -> Option<V>
|
||||||
|
where
|
||||||
|
K: Borrow<Q>,
|
||||||
|
Q: Hash + Eq + ?Sized,
|
||||||
|
{
|
||||||
|
self.0.read().await.get(key).cloned()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove the key-value pair associated with the given key.
|
||||||
|
pub async fn remove(&self, key: &K) -> Option<V> {
|
||||||
|
self.0.write().await.remove(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a reference to the underlying map.
|
||||||
|
pub async fn inner(&self) -> HashMap<K, V> {
|
||||||
|
self.0.read().await.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<K: Eq + Hash + Clone, V: Clone> Default for SafeMap<K, V> {
|
||||||
|
fn default() -> Self {
|
||||||
|
SafeMap::new()
|
||||||
|
}
|
||||||
|
}
|
@ -1,112 +0,0 @@
|
|||||||
use std::sync::{Arc, RwLock};
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use tower_lsp::LanguageServer;
|
|
||||||
|
|
||||||
fn new_zoo_client() -> kittycad::Client {
|
|
||||||
let user_agent = concat!(env!("CARGO_PKG_NAME"), ".rs/", env!("CARGO_PKG_VERSION"),);
|
|
||||||
let http_client = reqwest::Client::builder()
|
|
||||||
.user_agent(user_agent)
|
|
||||||
// For file conversions we need this to be long.
|
|
||||||
.timeout(std::time::Duration::from_secs(600))
|
|
||||||
.connect_timeout(std::time::Duration::from_secs(60));
|
|
||||||
let ws_client = reqwest::Client::builder()
|
|
||||||
.user_agent(user_agent)
|
|
||||||
// For file conversions we need this to be long.
|
|
||||||
.timeout(std::time::Duration::from_secs(600))
|
|
||||||
.connect_timeout(std::time::Duration::from_secs(60))
|
|
||||||
.connection_verbose(true)
|
|
||||||
.tcp_keepalive(std::time::Duration::from_secs(600))
|
|
||||||
.http1_only();
|
|
||||||
|
|
||||||
let token = std::env::var("KITTYCAD_API_TOKEN").expect("KITTYCAD_API_TOKEN not set");
|
|
||||||
|
|
||||||
// Create the client.
|
|
||||||
let mut client = kittycad::Client::new_from_reqwest(token, http_client, ws_client);
|
|
||||||
// Set a local engine address if it's set.
|
|
||||||
if let Ok(addr) = std::env::var("LOCAL_ENGINE_ADDR") {
|
|
||||||
client.set_base_url(addr);
|
|
||||||
}
|
|
||||||
|
|
||||||
client
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a fake kcl lsp server for testing.
|
|
||||||
pub async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
|
|
||||||
let stdlib = crate::std::StdLib::new();
|
|
||||||
let stdlib_completions = crate::lsp::kcl::get_completions_from_stdlib(&stdlib)?;
|
|
||||||
let stdlib_signatures = crate::lsp::kcl::get_signatures_from_stdlib(&stdlib)?;
|
|
||||||
|
|
||||||
let zoo_client = new_zoo_client();
|
|
||||||
|
|
||||||
let executor_ctx = if execute {
|
|
||||||
Some(crate::executor::ExecutorContext::new(&zoo_client, Default::default()).await?)
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
let can_execute = executor_ctx.is_some();
|
|
||||||
|
|
||||||
// Create the backend.
|
|
||||||
let (service, _) = tower_lsp::LspService::build(|client| crate::lsp::kcl::Backend {
|
|
||||||
client,
|
|
||||||
fs: Arc::new(crate::fs::FileManager::new()),
|
|
||||||
workspace_folders: Default::default(),
|
|
||||||
stdlib_completions,
|
|
||||||
stdlib_signatures,
|
|
||||||
token_map: Default::default(),
|
|
||||||
ast_map: Default::default(),
|
|
||||||
memory_map: Default::default(),
|
|
||||||
code_map: Default::default(),
|
|
||||||
diagnostics_map: Default::default(),
|
|
||||||
symbols_map: Default::default(),
|
|
||||||
semantic_tokens_map: Default::default(),
|
|
||||||
zoo_client,
|
|
||||||
can_send_telemetry: true,
|
|
||||||
executor_ctx: Arc::new(tokio::sync::RwLock::new(executor_ctx)),
|
|
||||||
can_execute: Arc::new(tokio::sync::RwLock::new(can_execute)),
|
|
||||||
is_initialized: Default::default(),
|
|
||||||
})
|
|
||||||
.custom_method("kcl/updateUnits", crate::lsp::kcl::Backend::update_units)
|
|
||||||
.custom_method("kcl/updateCanExecute", crate::lsp::kcl::Backend::update_can_execute)
|
|
||||||
.finish();
|
|
||||||
|
|
||||||
let server = service.inner();
|
|
||||||
|
|
||||||
server
|
|
||||||
.initialize(tower_lsp::lsp_types::InitializeParams::default())
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
|
|
||||||
|
|
||||||
Ok(server.clone())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a fake copilot lsp server for testing.
|
|
||||||
pub async fn copilot_lsp_server() -> Result<crate::lsp::copilot::Backend> {
|
|
||||||
// We don't actually need to authenticate to the backend for this test.
|
|
||||||
let zoo_client = kittycad::Client::new_from_env();
|
|
||||||
|
|
||||||
// Create the backend.
|
|
||||||
let (service, _) = tower_lsp::LspService::new(|client| crate::lsp::copilot::Backend {
|
|
||||||
client,
|
|
||||||
fs: Arc::new(crate::fs::FileManager::new()),
|
|
||||||
workspace_folders: Default::default(),
|
|
||||||
code_map: Default::default(),
|
|
||||||
zoo_client,
|
|
||||||
editor_info: Arc::new(RwLock::new(crate::lsp::copilot::types::CopilotEditorInfo::default())),
|
|
||||||
cache: Arc::new(crate::lsp::copilot::cache::CopilotCache::new()),
|
|
||||||
telemetry: Default::default(),
|
|
||||||
is_initialized: Default::default(),
|
|
||||||
diagnostics_map: Default::default(),
|
|
||||||
});
|
|
||||||
let server = service.inner();
|
|
||||||
|
|
||||||
server
|
|
||||||
.initialize(tower_lsp::lsp_types::InitializeParams::default())
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
|
|
||||||
|
|
||||||
Ok(server.clone())
|
|
||||||
}
|
|
File diff suppressed because it is too large
Load Diff
@ -37,7 +37,4 @@ pub fn get_line_before(pos: Position, rope: &Rope) -> Option<String> {
|
|||||||
pub trait IntoDiagnostic {
|
pub trait IntoDiagnostic {
|
||||||
/// Convert the traited object to a [lsp_types::Diagnostic].
|
/// Convert the traited object to a [lsp_types::Diagnostic].
|
||||||
fn to_lsp_diagnostic(&self, text: &str) -> Diagnostic;
|
fn to_lsp_diagnostic(&self, text: &str) -> Diagnostic;
|
||||||
|
|
||||||
/// Get the severity of the diagnostic.
|
|
||||||
fn severity(&self) -> tower_lsp::lsp_types::DiagnosticSeverity;
|
|
||||||
}
|
}
|
||||||
|
@ -474,7 +474,11 @@ fn integer_range(i: TokenSlice) -> PResult<Vec<Value>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn object_property(i: TokenSlice) -> PResult<ObjectProperty> {
|
fn object_property(i: TokenSlice) -> PResult<ObjectProperty> {
|
||||||
let key = identifier.context(expected("the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key")).parse_next(i)?;
|
let key = identifier
|
||||||
|
.context(expected(
|
||||||
|
"the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key",
|
||||||
|
))
|
||||||
|
.parse_next(i)?;
|
||||||
colon
|
colon
|
||||||
.context(expected(
|
.context(expected(
|
||||||
"a colon, which separates the property's key from the value you're setting it to, e.g. 'height: 4'",
|
"a colon, which separates the property's key from the value you're setting it to, e.g. 'height: 4'",
|
||||||
@ -584,9 +588,12 @@ fn member_expression_subscript(i: TokenSlice) -> PResult<(LiteralIdentifier, usi
|
|||||||
fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
|
fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
|
||||||
// This is an identifier, followed by a sequence of members (aka properties)
|
// This is an identifier, followed by a sequence of members (aka properties)
|
||||||
// First, the identifier.
|
// First, the identifier.
|
||||||
let id = identifier.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier")).parse_next(i)?;
|
let id = identifier
|
||||||
|
.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier"))
|
||||||
|
.parse_next(i)?;
|
||||||
// Now a sequence of members.
|
// Now a sequence of members.
|
||||||
let member = alt((member_expression_dot, member_expression_subscript)).context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
|
let member = alt((member_expression_dot, member_expression_subscript))
|
||||||
|
.context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
|
||||||
let mut members: Vec<_> = repeat(1.., member)
|
let mut members: Vec<_> = repeat(1.., member)
|
||||||
.context(expected("a sequence of at least one members/properties"))
|
.context(expected("a sequence of at least one members/properties"))
|
||||||
.parse_next(i)?;
|
.parse_next(i)?;
|
||||||
@ -1104,9 +1111,19 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
|
|||||||
// TODO: negation. Original parser doesn't support `not` yet.
|
// TODO: negation. Original parser doesn't support `not` yet.
|
||||||
TokenType::Operator => Err(KclError::Syntax(KclErrorDetails {
|
TokenType::Operator => Err(KclError::Syntax(KclErrorDetails {
|
||||||
source_ranges: token.as_source_ranges(),
|
source_ranges: token.as_source_ranges(),
|
||||||
message: format!("{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)", token.value.as_str(),),
|
message: format!(
|
||||||
|
"{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)",
|
||||||
|
token.value.as_str(),
|
||||||
|
),
|
||||||
|
})),
|
||||||
|
other => Err(KclError::Syntax(KclErrorDetails {
|
||||||
|
source_ranges: token.as_source_ranges(),
|
||||||
|
message: format!(
|
||||||
|
"{EXPECTED} but found {} which is {}",
|
||||||
|
token.value.as_str(),
|
||||||
|
other,
|
||||||
|
),
|
||||||
})),
|
})),
|
||||||
other => Err(KclError::Syntax(KclErrorDetails { source_ranges: token.as_source_ranges(), message: format!("{EXPECTED} but found {} which is {}", token.value.as_str(), other,) })),
|
|
||||||
})
|
})
|
||||||
.context(expected("a unary expression, e.g. -x or -3"))
|
.context(expected("a unary expression, e.g. -x or -3"))
|
||||||
.parse_next(i)?;
|
.parse_next(i)?;
|
||||||
@ -1674,7 +1691,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
start0.value,
|
start0.value,
|
||||||
NonCodeValue::BlockComment {
|
NonCodeValue::BlockComment {
|
||||||
value: "comment at start".to_owned(),
|
value: "comment at start".to_owned(),
|
||||||
style: CommentStyle::Block
|
style: CommentStyle::Block,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assert_eq!(start1.value, NonCodeValue::NewLine);
|
assert_eq!(start1.value, NonCodeValue::NewLine);
|
||||||
@ -1739,8 +1756,8 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
start: 32,
|
start: 32,
|
||||||
end: 33,
|
end: 33,
|
||||||
value: 2u32.into(),
|
value: 2u32.into(),
|
||||||
raw: "2".to_owned()
|
raw: "2".to_owned(),
|
||||||
}))
|
})),
|
||||||
})],
|
})],
|
||||||
non_code_meta: NonCodeMeta {
|
non_code_meta: NonCodeMeta {
|
||||||
non_code_nodes: Default::default(),
|
non_code_nodes: Default::default(),
|
||||||
@ -1748,7 +1765,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
start: 7,
|
start: 7,
|
||||||
end: 25,
|
end: 25,
|
||||||
value: NonCodeValue::NewLine
|
value: NonCodeValue::NewLine
|
||||||
}]
|
}],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
return_type: None,
|
return_type: None,
|
||||||
@ -1773,7 +1790,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
non_code_meta.non_code_nodes.get(&2).unwrap()[0].value,
|
non_code_meta.non_code_nodes.get(&2).unwrap()[0].value,
|
||||||
NonCodeValue::InlineComment {
|
NonCodeValue::InlineComment {
|
||||||
value: "inline-comment".to_owned(),
|
value: "inline-comment".to_owned(),
|
||||||
style: CommentStyle::Line
|
style: CommentStyle::Line,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assert_eq!(body.len(), 4);
|
assert_eq!(body.len(), 4);
|
||||||
@ -1798,8 +1815,8 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
end: 20,
|
end: 20,
|
||||||
value: NonCodeValue::BlockComment {
|
value: NonCodeValue::BlockComment {
|
||||||
value: "this is a comment".to_owned(),
|
value: "this is a comment".to_owned(),
|
||||||
style: CommentStyle::Line
|
style: CommentStyle::Line,
|
||||||
}
|
},
|
||||||
}],
|
}],
|
||||||
non_code_meta.start,
|
non_code_meta.start,
|
||||||
);
|
);
|
||||||
@ -1810,13 +1827,13 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
end: 82,
|
end: 82,
|
||||||
value: NonCodeValue::InlineComment {
|
value: NonCodeValue::InlineComment {
|
||||||
value: "block\n comment".to_owned(),
|
value: "block\n comment".to_owned(),
|
||||||
style: CommentStyle::Block
|
style: CommentStyle::Block,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
NonCodeNode {
|
NonCodeNode {
|
||||||
start: 82,
|
start: 82,
|
||||||
end: 86,
|
end: 86,
|
||||||
value: NonCodeValue::NewLine
|
value: NonCodeValue::NewLine,
|
||||||
},
|
},
|
||||||
]),
|
]),
|
||||||
non_code_meta.non_code_nodes.get(&0),
|
non_code_meta.non_code_nodes.get(&0),
|
||||||
@ -1827,8 +1844,8 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
end: 129,
|
end: 129,
|
||||||
value: NonCodeValue::BlockComment {
|
value: NonCodeValue::BlockComment {
|
||||||
value: "this is also a comment".to_owned(),
|
value: "this is also a comment".to_owned(),
|
||||||
style: CommentStyle::Line
|
style: CommentStyle::Line,
|
||||||
}
|
},
|
||||||
}]),
|
}]),
|
||||||
non_code_meta.non_code_nodes.get(&1),
|
non_code_meta.non_code_nodes.get(&1),
|
||||||
);
|
);
|
||||||
@ -1847,7 +1864,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
actual.non_code_meta.non_code_nodes.get(&0).unwrap()[0].value,
|
actual.non_code_meta.non_code_nodes.get(&0).unwrap()[0].value,
|
||||||
NonCodeValue::InlineComment {
|
NonCodeValue::InlineComment {
|
||||||
value: "block\n comment".to_owned(),
|
value: "block\n comment".to_owned(),
|
||||||
style: CommentStyle::Block
|
style: CommentStyle::Block,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -1895,7 +1912,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
|||||||
start: 9,
|
start: 9,
|
||||||
end: 10,
|
end: 10,
|
||||||
value: 3u32.into(),
|
value: 3u32.into(),
|
||||||
raw: "3".to_owned()
|
raw: "3".to_owned(),
|
||||||
}))
|
}))
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -567,7 +567,7 @@ mod tests {
|
|||||||
project_name: Some("assembly".to_string()),
|
project_name: Some("assembly".to_string()),
|
||||||
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects/assembly".to_string(),
|
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects/assembly".to_string(),
|
||||||
current_file_name: None,
|
current_file_name: None,
|
||||||
current_file_path: None
|
current_file_path: None,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -586,7 +586,7 @@ mod tests {
|
|||||||
project_name: None,
|
project_name: None,
|
||||||
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects".to_string(),
|
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects".to_string(),
|
||||||
current_file_name: None,
|
current_file_name: None,
|
||||||
current_file_path: None
|
current_file_path: None,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -624,7 +624,7 @@ mod tests {
|
|||||||
project_name: Some("modeling-app".to_string()),
|
project_name: Some("modeling-app".to_string()),
|
||||||
project_path: "/Users/macinatormax/kittycad/modeling-app".to_string(),
|
project_path: "/Users/macinatormax/kittycad/modeling-app".to_string(),
|
||||||
current_file_name: None,
|
current_file_name: None,
|
||||||
current_file_path: None
|
current_file_path: None,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -642,7 +642,7 @@ mod tests {
|
|||||||
project_name: Some("browser".to_string()),
|
project_name: Some("browser".to_string()),
|
||||||
project_path: "/browser".to_string(),
|
project_path: "/browser".to_string(),
|
||||||
current_file_name: Some("main.kcl".to_string()),
|
current_file_name: Some("main.kcl".to_string()),
|
||||||
current_file_path: Some("/browser/main.kcl".to_string())
|
current_file_path: Some("/browser/main.kcl".to_string()),
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -660,7 +660,7 @@ mod tests {
|
|||||||
project_name: Some("browser".to_string()),
|
project_name: Some("browser".to_string()),
|
||||||
project_path: "/browser".to_string(),
|
project_path: "/browser".to_string(),
|
||||||
current_file_name: None,
|
current_file_name: None,
|
||||||
current_file_path: None
|
current_file_path: None,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -1046,7 +1046,13 @@ const model = import("model.obj")"#
|
|||||||
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.toml")).await;
|
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.toml")).await;
|
||||||
|
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(result.unwrap_err().to_string(), format!("File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.toml").display()));
|
assert_eq!(
|
||||||
|
result.unwrap_err().to_string(),
|
||||||
|
format!(
|
||||||
|
"File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
|
||||||
|
tmp_project_dir.join("settings.toml").display()
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
std::fs::remove_dir_all(tmp_project_dir).unwrap();
|
std::fs::remove_dir_all(tmp_project_dir).unwrap();
|
||||||
}
|
}
|
||||||
@ -1061,7 +1067,13 @@ const model = import("model.obj")"#
|
|||||||
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.docx")).await;
|
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.docx")).await;
|
||||||
|
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(result.unwrap_err().to_string(), format!("File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.docx").display()));
|
assert_eq!(
|
||||||
|
result.unwrap_err().to_string(),
|
||||||
|
format!(
|
||||||
|
"File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
|
||||||
|
tmp_project_dir.join("settings.docx").display()
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
std::fs::remove_dir_all(tmp_project_dir).unwrap();
|
std::fs::remove_dir_all(tmp_project_dir).unwrap();
|
||||||
}
|
}
|
||||||
|
@ -640,7 +640,7 @@ textWrapping = true
|
|||||||
app: AppSettings {
|
app: AppSettings {
|
||||||
appearance: AppearanceSettings {
|
appearance: AppearanceSettings {
|
||||||
theme: AppTheme::Dark,
|
theme: AppTheme::Dark,
|
||||||
color: Default::default()
|
color: Default::default(),
|
||||||
},
|
},
|
||||||
onboarding_status: OnboardingStatus::Dismissed,
|
onboarding_status: OnboardingStatus::Dismissed,
|
||||||
project_directory: None,
|
project_directory: None,
|
||||||
@ -654,15 +654,15 @@ textWrapping = true
|
|||||||
mouse_controls: Default::default(),
|
mouse_controls: Default::default(),
|
||||||
highlight_edges: Default::default(),
|
highlight_edges: Default::default(),
|
||||||
show_debug_panel: true,
|
show_debug_panel: true,
|
||||||
enable_ssao: false.into()
|
enable_ssao: false.into(),
|
||||||
},
|
},
|
||||||
text_editor: TextEditorSettings {
|
text_editor: TextEditorSettings {
|
||||||
text_wrapping: true.into(),
|
text_wrapping: true.into(),
|
||||||
blinking_cursor: true.into()
|
blinking_cursor: true.into(),
|
||||||
},
|
},
|
||||||
project: Default::default(),
|
project: Default::default(),
|
||||||
command_bar: CommandBarSettings {
|
command_bar: CommandBarSettings {
|
||||||
include_settings: true.into()
|
include_settings: true.into(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -698,7 +698,7 @@ includeSettings = false
|
|||||||
app: AppSettings {
|
app: AppSettings {
|
||||||
appearance: AppearanceSettings {
|
appearance: AppearanceSettings {
|
||||||
theme: AppTheme::Dark,
|
theme: AppTheme::Dark,
|
||||||
color: 138.0.into()
|
color: 138.0.into(),
|
||||||
},
|
},
|
||||||
onboarding_status: Default::default(),
|
onboarding_status: Default::default(),
|
||||||
project_directory: None,
|
project_directory: None,
|
||||||
@ -712,15 +712,15 @@ includeSettings = false
|
|||||||
mouse_controls: Default::default(),
|
mouse_controls: Default::default(),
|
||||||
highlight_edges: Default::default(),
|
highlight_edges: Default::default(),
|
||||||
show_debug_panel: true,
|
show_debug_panel: true,
|
||||||
enable_ssao: true.into()
|
enable_ssao: true.into(),
|
||||||
},
|
},
|
||||||
text_editor: TextEditorSettings {
|
text_editor: TextEditorSettings {
|
||||||
text_wrapping: false.into(),
|
text_wrapping: false.into(),
|
||||||
blinking_cursor: false.into()
|
blinking_cursor: false.into(),
|
||||||
},
|
},
|
||||||
project: Default::default(),
|
project: Default::default(),
|
||||||
command_bar: CommandBarSettings {
|
command_bar: CommandBarSettings {
|
||||||
include_settings: false.into()
|
include_settings: false.into(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -761,7 +761,7 @@ defaultProjectName = "projects-$nnn"
|
|||||||
app: AppSettings {
|
app: AppSettings {
|
||||||
appearance: AppearanceSettings {
|
appearance: AppearanceSettings {
|
||||||
theme: AppTheme::Dark,
|
theme: AppTheme::Dark,
|
||||||
color: 138.0.into()
|
color: 138.0.into(),
|
||||||
},
|
},
|
||||||
onboarding_status: OnboardingStatus::Dismissed,
|
onboarding_status: OnboardingStatus::Dismissed,
|
||||||
project_directory: None,
|
project_directory: None,
|
||||||
@ -775,18 +775,18 @@ defaultProjectName = "projects-$nnn"
|
|||||||
mouse_controls: Default::default(),
|
mouse_controls: Default::default(),
|
||||||
highlight_edges: Default::default(),
|
highlight_edges: Default::default(),
|
||||||
show_debug_panel: true,
|
show_debug_panel: true,
|
||||||
enable_ssao: true.into()
|
enable_ssao: true.into(),
|
||||||
},
|
},
|
||||||
text_editor: TextEditorSettings {
|
text_editor: TextEditorSettings {
|
||||||
text_wrapping: false.into(),
|
text_wrapping: false.into(),
|
||||||
blinking_cursor: false.into()
|
blinking_cursor: false.into(),
|
||||||
},
|
},
|
||||||
project: ProjectSettings {
|
project: ProjectSettings {
|
||||||
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
|
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
|
||||||
default_project_name: "projects-$nnn".to_string().into()
|
default_project_name: "projects-$nnn".to_string().into(),
|
||||||
},
|
},
|
||||||
command_bar: CommandBarSettings {
|
command_bar: CommandBarSettings {
|
||||||
include_settings: false.into()
|
include_settings: false.into(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -836,7 +836,7 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
|
|||||||
app: AppSettings {
|
app: AppSettings {
|
||||||
appearance: AppearanceSettings {
|
appearance: AppearanceSettings {
|
||||||
theme: AppTheme::System,
|
theme: AppTheme::System,
|
||||||
color: Default::default()
|
color: Default::default(),
|
||||||
},
|
},
|
||||||
onboarding_status: OnboardingStatus::Dismissed,
|
onboarding_status: OnboardingStatus::Dismissed,
|
||||||
project_directory: None,
|
project_directory: None,
|
||||||
@ -850,15 +850,15 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
|
|||||||
mouse_controls: Default::default(),
|
mouse_controls: Default::default(),
|
||||||
highlight_edges: true.into(),
|
highlight_edges: true.into(),
|
||||||
show_debug_panel: false,
|
show_debug_panel: false,
|
||||||
enable_ssao: true.into()
|
enable_ssao: true.into(),
|
||||||
},
|
},
|
||||||
text_editor: TextEditorSettings {
|
text_editor: TextEditorSettings {
|
||||||
text_wrapping: true.into(),
|
text_wrapping: true.into(),
|
||||||
blinking_cursor: true.into()
|
blinking_cursor: true.into(),
|
||||||
},
|
},
|
||||||
project: ProjectSettings {
|
project: ProjectSettings {
|
||||||
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
|
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
|
||||||
default_project_name: "project-$nnn".to_string().into()
|
default_project_name: "project-$nnn".to_string().into(),
|
||||||
},
|
},
|
||||||
command_bar: CommandBarSettings {
|
command_bar: CommandBarSettings {
|
||||||
include_settings: true.into()
|
include_settings: true.into()
|
||||||
|
@ -115,7 +115,7 @@ includeSettings = false
|
|||||||
app: AppSettings {
|
app: AppSettings {
|
||||||
appearance: AppearanceSettings {
|
appearance: AppearanceSettings {
|
||||||
theme: AppTheme::Dark,
|
theme: AppTheme::Dark,
|
||||||
color: 138.0.into()
|
color: 138.0.into(),
|
||||||
},
|
},
|
||||||
onboarding_status: Default::default(),
|
onboarding_status: Default::default(),
|
||||||
project_directory: None,
|
project_directory: None,
|
||||||
@ -129,14 +129,14 @@ includeSettings = false
|
|||||||
mouse_controls: Default::default(),
|
mouse_controls: Default::default(),
|
||||||
highlight_edges: Default::default(),
|
highlight_edges: Default::default(),
|
||||||
show_debug_panel: true,
|
show_debug_panel: true,
|
||||||
enable_ssao: true.into()
|
enable_ssao: true.into(),
|
||||||
},
|
},
|
||||||
text_editor: TextEditorSettings {
|
text_editor: TextEditorSettings {
|
||||||
text_wrapping: false.into(),
|
text_wrapping: false.into(),
|
||||||
blinking_cursor: false.into()
|
blinking_cursor: false.into(),
|
||||||
},
|
},
|
||||||
command_bar: CommandBarSettings {
|
command_bar: CommandBarSettings {
|
||||||
include_settings: false.into()
|
include_settings: false.into(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -85,9 +85,9 @@ async fn inner_chamfer(
|
|||||||
// error to the user that they can only tag one edge at a time.
|
// error to the user that they can only tag one edge at a time.
|
||||||
if tag.is_some() && data.tags.len() > 1 {
|
if tag.is_some() && data.tags.len() > 1 {
|
||||||
return Err(KclError::Type(KclErrorDetails {
|
return Err(KclError::Type(KclErrorDetails {
|
||||||
message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
|
message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
|
||||||
source_ranges: vec![args.source_range],
|
source_ranges: vec![args.source_range],
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut fillet_or_chamfers = Vec::new();
|
let mut fillet_or_chamfers = Vec::new();
|
||||||
|
@ -314,7 +314,10 @@ fn get_import_format_from_extension(ext: &str) -> Result<kittycad::types::InputF
|
|||||||
} else if ext == "glb" {
|
} else if ext == "glb" {
|
||||||
kittycad::types::FileImportFormat::Gltf
|
kittycad::types::FileImportFormat::Gltf
|
||||||
} else {
|
} else {
|
||||||
anyhow::bail!("unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.", ext)
|
anyhow::bail!(
|
||||||
|
"unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.",
|
||||||
|
ext
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -28,7 +28,7 @@ use schemars::JsonSchema;
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
ast::types::{parse_json_number_as_f64, FunctionExpression, TagDeclarator},
|
ast::types::{parse_json_number_as_f64, TagDeclarator},
|
||||||
docs::StdLibFn,
|
docs::StdLibFn,
|
||||||
errors::{KclError, KclErrorDetails},
|
errors::{KclError, KclErrorDetails},
|
||||||
executor::{
|
executor::{
|
||||||
@ -85,7 +85,6 @@ lazy_static! {
|
|||||||
Box::new(crate::std::patterns::PatternLinear3D),
|
Box::new(crate::std::patterns::PatternLinear3D),
|
||||||
Box::new(crate::std::patterns::PatternCircular2D),
|
Box::new(crate::std::patterns::PatternCircular2D),
|
||||||
Box::new(crate::std::patterns::PatternCircular3D),
|
Box::new(crate::std::patterns::PatternCircular3D),
|
||||||
Box::new(crate::std::patterns::PatternTransform),
|
|
||||||
Box::new(crate::std::chamfer::Chamfer),
|
Box::new(crate::std::chamfer::Chamfer),
|
||||||
Box::new(crate::std::fillet::Fillet),
|
Box::new(crate::std::fillet::Fillet),
|
||||||
Box::new(crate::std::fillet::GetOppositeEdge),
|
Box::new(crate::std::fillet::GetOppositeEdge),
|
||||||
@ -352,39 +351,6 @@ impl Args {
|
|||||||
Ok(numbers)
|
Ok(numbers)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_pattern_transform_args(&self) -> Result<(u32, FnAsArg<'_>, ExtrudeGroupSet), KclError> {
|
|
||||||
let sr = vec![self.source_range];
|
|
||||||
let mut args = self.args.iter();
|
|
||||||
let num_repetitions = args.next().ok_or_else(|| {
|
|
||||||
KclError::Type(KclErrorDetails {
|
|
||||||
message: "Missing first argument (should be the number of repetitions)".to_owned(),
|
|
||||||
source_ranges: sr.clone(),
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
let num_repetitions = num_repetitions.get_u32(sr.clone())?;
|
|
||||||
let transform = args.next().ok_or_else(|| {
|
|
||||||
KclError::Type(KclErrorDetails {
|
|
||||||
message: "Missing second argument (should be the transform function)".to_owned(),
|
|
||||||
source_ranges: sr.clone(),
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
let func = transform.get_function(sr.clone())?;
|
|
||||||
let eg = args.next().ok_or_else(|| {
|
|
||||||
KclError::Type(KclErrorDetails {
|
|
||||||
message: "Missing third argument (should be a Sketch/ExtrudeGroup or an array of Sketch/ExtrudeGroups)"
|
|
||||||
.to_owned(),
|
|
||||||
source_ranges: sr.clone(),
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
let eg = eg.get_extrude_group_set().map_err(|_e| {
|
|
||||||
KclError::Type(KclErrorDetails {
|
|
||||||
message: "Third argument was not an ExtrudeGroup".to_owned(),
|
|
||||||
source_ranges: sr.clone(),
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
Ok((num_repetitions, func, eg))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_hypotenuse_leg(&self) -> Result<(f64, f64), KclError> {
|
fn get_hypotenuse_leg(&self) -> Result<(f64, f64), KclError> {
|
||||||
let numbers = self.get_number_array()?;
|
let numbers = self.get_number_array()?;
|
||||||
|
|
||||||
@ -1276,11 +1242,6 @@ pub enum Primitive {
|
|||||||
Uuid,
|
Uuid,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct FnAsArg<'a> {
|
|
||||||
pub func: &'a crate::executor::MemoryFunction,
|
|
||||||
pub expr: Box<FunctionExpression>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use base64::Engine;
|
use base64::Engine;
|
||||||
|
@ -8,11 +8,7 @@ use serde::{Deserialize, Serialize};
|
|||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
errors::{KclError, KclErrorDetails},
|
errors::{KclError, KclErrorDetails},
|
||||||
executor::{
|
executor::{ExtrudeGroup, ExtrudeGroupSet, Geometries, Geometry, MemoryItem, SketchGroup, SketchGroupSet},
|
||||||
ExtrudeGroup, ExtrudeGroupSet, Geometries, Geometry, MemoryItem, Point3d, ProgramReturn, SketchGroup,
|
|
||||||
SketchGroupSet, SourceRange, UserVal,
|
|
||||||
},
|
|
||||||
function_param::FunctionParam,
|
|
||||||
std::{types::Uint, Args},
|
std::{types::Uint, Args},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -74,233 +70,6 @@ impl LinearPattern {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A linear pattern
|
|
||||||
/// Each element in the pattern repeats a particular piece of geometry.
|
|
||||||
/// The repetitions can be transformed by the `transform` parameter.
|
|
||||||
pub async fn pattern_transform(args: Args) -> Result<MemoryItem, KclError> {
|
|
||||||
let (num_repetitions, transform, extr) = args.get_pattern_transform_args()?;
|
|
||||||
|
|
||||||
let extrude_groups = inner_pattern_transform(
|
|
||||||
num_repetitions,
|
|
||||||
FunctionParam {
|
|
||||||
inner: transform.func,
|
|
||||||
fn_expr: transform.expr,
|
|
||||||
meta: vec![args.source_range.into()],
|
|
||||||
ctx: args.ctx.clone(),
|
|
||||||
memory: args.current_program_memory.clone(),
|
|
||||||
},
|
|
||||||
extr,
|
|
||||||
&args,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
Ok(MemoryItem::ExtrudeGroups { value: extrude_groups })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A linear pattern on a 3D solid.
|
|
||||||
/// Each repetition of the pattern can be transformed (e.g. scaled, translated, hidden, etc).
|
|
||||||
///
|
|
||||||
/// ```no_run
|
|
||||||
/// // Parameters
|
|
||||||
/// const r = 50 // base radius
|
|
||||||
/// const h = 10 // layer height
|
|
||||||
/// const t = 0.005 // taper factor [0-1)
|
|
||||||
/// // Defines how to modify each layer of the vase.
|
|
||||||
/// // Each replica is shifted up the Z axis, and has a smoothly-varying radius
|
|
||||||
/// fn transform = (replicaId) => {
|
|
||||||
/// let scale = r * abs(1 - (t * replicaId)) * (5 + cos(replicaId / 8))
|
|
||||||
/// return {
|
|
||||||
/// translate: [0, 0, replicaId * 10],
|
|
||||||
/// scale: [scale, scale, 0],
|
|
||||||
/// }
|
|
||||||
/// }
|
|
||||||
/// // Each layer is just a pretty thin cylinder.
|
|
||||||
/// fn layer = () => {
|
|
||||||
/// return startSketchOn("XY") // or some other plane idk
|
|
||||||
/// |> circle([0, 0], 1, %, 'tag1')
|
|
||||||
/// |> extrude(h, %)
|
|
||||||
/// }
|
|
||||||
/// // The vase is 100 layers tall.
|
|
||||||
/// // The 100 layers are replica of each other, with a slight transformation applied to each.
|
|
||||||
/// let vase = layer() |> patternTransform(100, transform, %)
|
|
||||||
/// ```
|
|
||||||
#[stdlib {
|
|
||||||
name = "patternTransform",
|
|
||||||
}]
|
|
||||||
async fn inner_pattern_transform<'a>(
|
|
||||||
num_repetitions: u32,
|
|
||||||
transform_function: FunctionParam<'a>,
|
|
||||||
extrude_group_set: ExtrudeGroupSet,
|
|
||||||
args: &'a Args,
|
|
||||||
) -> Result<Vec<Box<ExtrudeGroup>>, KclError> {
|
|
||||||
// Build the vec of transforms, one for each repetition.
|
|
||||||
let mut transform = Vec::new();
|
|
||||||
for i in 0..num_repetitions {
|
|
||||||
let t = make_transform(i, &transform_function, args.source_range).await?;
|
|
||||||
transform.push(t);
|
|
||||||
}
|
|
||||||
// Flush the batch for our fillets/chamfers if there are any.
|
|
||||||
// If we do not flush these, then you won't be able to pattern something with fillets.
|
|
||||||
// Flush just the fillets/chamfers that apply to these extrude groups.
|
|
||||||
args.flush_batch_for_extrude_group_set(extrude_group_set.clone().into())
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let starting_extrude_groups: Vec<Box<ExtrudeGroup>> = extrude_group_set.into();
|
|
||||||
|
|
||||||
if args.ctx.is_mock {
|
|
||||||
return Ok(starting_extrude_groups);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut extrude_groups = Vec::new();
|
|
||||||
for e in starting_extrude_groups {
|
|
||||||
let new_extrude_groups = send_pattern_transform(transform.clone(), &e, args).await?;
|
|
||||||
extrude_groups.extend(new_extrude_groups);
|
|
||||||
}
|
|
||||||
Ok(extrude_groups)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn send_pattern_transform(
|
|
||||||
// This should be passed via reference, see
|
|
||||||
// https://github.com/KittyCAD/modeling-app/issues/2821
|
|
||||||
transform: Vec<kittycad::types::LinearTransform>,
|
|
||||||
extrude_group: &ExtrudeGroup,
|
|
||||||
args: &Args,
|
|
||||||
) -> Result<Vec<Box<ExtrudeGroup>>, KclError> {
|
|
||||||
let id = uuid::Uuid::new_v4();
|
|
||||||
|
|
||||||
let resp = args
|
|
||||||
.send_modeling_cmd(
|
|
||||||
id,
|
|
||||||
ModelingCmd::EntityLinearPatternTransform {
|
|
||||||
entity_id: extrude_group.id,
|
|
||||||
transform,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let kittycad::types::OkWebSocketResponseData::Modeling {
|
|
||||||
modeling_response: kittycad::types::OkModelingCmdResponse::EntityLinearPatternTransform { data: pattern_info },
|
|
||||||
} = &resp
|
|
||||||
else {
|
|
||||||
return Err(KclError::Engine(KclErrorDetails {
|
|
||||||
message: format!("EntityLinearPattern response was not as expected: {:?}", resp),
|
|
||||||
source_ranges: vec![args.source_range],
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut geometries = vec![Box::new(extrude_group.clone())];
|
|
||||||
for id in pattern_info.entity_ids.iter() {
|
|
||||||
let mut new_extrude_group = extrude_group.clone();
|
|
||||||
new_extrude_group.id = *id;
|
|
||||||
geometries.push(Box::new(new_extrude_group));
|
|
||||||
}
|
|
||||||
Ok(geometries)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn make_transform<'a>(
|
|
||||||
i: u32,
|
|
||||||
transform_function: &FunctionParam<'a>,
|
|
||||||
source_range: SourceRange,
|
|
||||||
) -> Result<kittycad::types::LinearTransform, KclError> {
|
|
||||||
// Call the transform fn for this repetition.
|
|
||||||
let repetition_num = MemoryItem::UserVal(UserVal {
|
|
||||||
value: serde_json::Value::Number(i.into()),
|
|
||||||
meta: vec![source_range.into()],
|
|
||||||
});
|
|
||||||
let transform_fn_args = vec![repetition_num];
|
|
||||||
let transform_fn_return = transform_function.call(transform_fn_args).await?.0;
|
|
||||||
|
|
||||||
// Unpack the returned transform object.
|
|
||||||
let source_ranges = vec![source_range];
|
|
||||||
let transform_fn_return = transform_fn_return.ok_or_else(|| {
|
|
||||||
KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "Transform function must return a value".to_string(),
|
|
||||||
source_ranges: source_ranges.clone(),
|
|
||||||
})
|
|
||||||
})?;
|
|
||||||
let ProgramReturn::Value(transform_fn_return) = transform_fn_return else {
|
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "Transform function must return a value".to_string(),
|
|
||||||
source_ranges: source_ranges.clone(),
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
let MemoryItem::UserVal(transform) = transform_fn_return else {
|
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "Transform function must return a transform object".to_string(),
|
|
||||||
source_ranges: source_ranges.clone(),
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
|
|
||||||
// Apply defaults to the transform.
|
|
||||||
let replicate = match transform.value.get("replicate") {
|
|
||||||
Some(serde_json::Value::Bool(true)) => true,
|
|
||||||
Some(serde_json::Value::Bool(false)) => false,
|
|
||||||
Some(_) => {
|
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "The 'replicate' key must be a bool".to_string(),
|
|
||||||
source_ranges: source_ranges.clone(),
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
None => true,
|
|
||||||
};
|
|
||||||
let scale = match transform.value.get("scale") {
|
|
||||||
Some(x) => array_to_point3d(x, source_ranges.clone())?,
|
|
||||||
None => Point3d { x: 1.0, y: 1.0, z: 1.0 },
|
|
||||||
};
|
|
||||||
let translate = match transform.value.get("translate") {
|
|
||||||
Some(x) => array_to_point3d(x, source_ranges.clone())?,
|
|
||||||
None => Point3d { x: 0.0, y: 0.0, z: 0.0 },
|
|
||||||
};
|
|
||||||
let t = kittycad::types::LinearTransform {
|
|
||||||
replicate,
|
|
||||||
scale: Some(scale.into()),
|
|
||||||
translate: Some(translate.into()),
|
|
||||||
};
|
|
||||||
Ok(t)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn array_to_point3d(json: &serde_json::Value, source_ranges: Vec<SourceRange>) -> Result<Point3d, KclError> {
|
|
||||||
let serde_json::Value::Array(arr) = dbg!(json) else {
|
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: "Expected an array of 3 numbers (i.e. a 3D point)".to_string(),
|
|
||||||
source_ranges,
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
let len = arr.len();
|
|
||||||
if len != 3 {
|
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: format!("Expected an array of 3 numbers (i.e. a 3D point) but found {len} items"),
|
|
||||||
source_ranges,
|
|
||||||
}));
|
|
||||||
};
|
|
||||||
// Gets an f64 from a JSON value, returns Option.
|
|
||||||
let f = |j: &serde_json::Value| j.as_number().and_then(|num| num.as_f64()).map(|x| x.to_owned());
|
|
||||||
let err = |component| {
|
|
||||||
KclError::Semantic(KclErrorDetails {
|
|
||||||
message: format!("{component} component of this point was not a number"),
|
|
||||||
source_ranges: source_ranges.clone(),
|
|
||||||
})
|
|
||||||
};
|
|
||||||
let x = f(&arr[0]).ok_or_else(|| err("X"))?;
|
|
||||||
let y = f(&arr[1]).ok_or_else(|| err("Y"))?;
|
|
||||||
let z = f(&arr[2]).ok_or_else(|| err("Z"))?;
|
|
||||||
Ok(Point3d { x, y, z })
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_array_to_point3d() {
        // A plain JSON array of three numbers parses into the matching 3D point.
        let json = serde_json::json!([1.1, 2.2, 3.3]);
        let point = array_to_point3d(&json, Vec::new()).unwrap();
        assert_eq!(point, Point3d { x: 1.1, y: 2.2, z: 3.3 });
    }
}
|
|
||||||
|
|
||||||
/// A linear pattern on a 2D sketch.
|
/// A linear pattern on a 2D sketch.
|
||||||
pub async fn pattern_linear_2d(args: Args) -> Result<MemoryItem, KclError> {
|
pub async fn pattern_linear_2d(args: Args) -> Result<MemoryItem, KclError> {
|
||||||
let (data, sketch_group_set): (LinearPattern2dData, SketchGroupSet) = args.get_data_and_sketch_group_set()?;
|
let (data, sketch_group_set): (LinearPattern2dData, SketchGroupSet) = args.get_data_and_sketch_group_set()?;
|
||||||
|
@ -431,7 +431,7 @@ mod tests {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if let Err(err) = result {
|
if let Err(err) = result {
|
||||||
assert!(err.to_string().contains("Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
|
assert!(err.to_string().contains( "Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
|
||||||
} else {
|
} else {
|
||||||
panic!("Expected error");
|
panic!("Expected error");
|
||||||
}
|
}
|
||||||
|
@ -1,296 +0,0 @@
|
|||||||
use anyhow::Result;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
ast::types::{
|
|
||||||
BinaryPart, BodyItem, LiteralIdentifier, MemberExpression, MemberObject, ObjectExpression, ObjectProperty,
|
|
||||||
Parameter, Program, UnaryExpression, Value, VariableDeclarator,
|
|
||||||
},
|
|
||||||
walk::Node,
|
|
||||||
};
|
|
||||||
|
|
||||||
/// Walker is implemented by things that are able to walk an AST tree to
/// produce lints. This trait is implemented automatically for a few of the
/// common types, but can be manually implemented too.
pub trait Walker<'a> {
    /// Walk will visit every element of the AST.
    ///
    /// Returning `Ok(false)` tells the traversal driver (see `walk`) to stop
    /// early; `Ok(true)` continues the walk.
    fn walk(&self, n: Node<'a>) -> Result<bool>;
}
|
|
||||||
|
|
||||||
/// Any callable of shape `Fn(Node<'a>) -> Result<bool>` can be used directly
/// as a [`Walker`]; the trait method simply forwards to the closure.
impl<'a, FnT> Walker<'a> for FnT
where
    FnT: Fn(Node<'a>) -> Result<bool>,
{
    fn walk(&self, n: Node<'a>) -> Result<bool> {
        // Delegate straight to the wrapped callable.
        self(n)
    }
}
|
|
||||||
|
|
||||||
/// Run the Walker against all [Node]s in a [Program].
|
|
||||||
pub fn walk<'a, WalkT>(prog: &'a Program, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(prog.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
for bi in &prog.body {
|
|
||||||
if !walk_body_item(bi, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn walk_variable_declarator<'a, WalkT>(node: &'a VariableDeclarator, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(node.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
if !f.walk((&node.id).into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
walk_value(&node.init, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn walk_parameter<'a, WalkT>(node: &'a Parameter, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(node.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
f.walk((&node.identifier).into())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Walk through a [MemberObject].
///
/// Only the node itself is visited; no children are walked here.
fn walk_member_object<'a, WalkT>(node: &'a MemberObject, f: &WalkT) -> Result<bool>
where
    WalkT: Walker<'a>,
{
    f.walk(node.into())
}
|
|
||||||
|
|
||||||
/// Walk through a [LiteralIdentifier].
///
/// Only the node itself is visited; no children are walked here.
fn walk_literal_identifier<'a, WalkT>(node: &'a LiteralIdentifier, f: &WalkT) -> Result<bool>
where
    WalkT: Walker<'a>,
{
    f.walk(node.into())
}
|
|
||||||
|
|
||||||
fn walk_member_expression<'a, WalkT>(node: &'a MemberExpression, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(node.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
if !walk_member_object(&node.object, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
walk_literal_identifier(&node.property, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn walk_binary_part<'a, WalkT>(node: &'a BinaryPart, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
match node {
|
|
||||||
BinaryPart::Literal(lit) => f.walk(lit.as_ref().into()),
|
|
||||||
BinaryPart::Identifier(id) => f.walk(id.as_ref().into()),
|
|
||||||
BinaryPart::BinaryExpression(be) => f.walk(be.as_ref().into()),
|
|
||||||
BinaryPart::CallExpression(ce) => f.walk(ce.as_ref().into()),
|
|
||||||
BinaryPart::UnaryExpression(ue) => walk_unary_expression(ue, f),
|
|
||||||
BinaryPart::MemberExpression(me) => walk_member_expression(me, f),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn walk_value<'a, WalkT>(node: &'a Value, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
match node {
|
|
||||||
Value::Literal(lit) => f.walk(lit.as_ref().into()),
|
|
||||||
Value::TagDeclarator(tag) => f.walk(tag.as_ref().into()),
|
|
||||||
|
|
||||||
Value::Identifier(id) => {
|
|
||||||
// sometimes there's a bare Identifier without a Value::Identifier.
|
|
||||||
f.walk(id.as_ref().into())
|
|
||||||
}
|
|
||||||
|
|
||||||
Value::BinaryExpression(be) => {
|
|
||||||
if !f.walk(be.as_ref().into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
if !walk_binary_part(&be.left, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
walk_binary_part(&be.right, f)
|
|
||||||
}
|
|
||||||
Value::FunctionExpression(fe) => {
|
|
||||||
if !f.walk(fe.as_ref().into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
for arg in &fe.params {
|
|
||||||
if !walk_parameter(arg, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
walk(&fe.body, f)
|
|
||||||
}
|
|
||||||
Value::CallExpression(ce) => {
|
|
||||||
if !f.walk(ce.as_ref().into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
if !f.walk((&ce.callee).into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
for e in &ce.arguments {
|
|
||||||
if !walk_value::<WalkT>(e, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
Value::PipeExpression(pe) => {
|
|
||||||
if !f.walk(pe.as_ref().into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
for e in &pe.body {
|
|
||||||
if !walk_value::<WalkT>(e, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
Value::PipeSubstitution(ps) => f.walk(ps.as_ref().into()),
|
|
||||||
Value::ArrayExpression(ae) => {
|
|
||||||
if !f.walk(ae.as_ref().into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
for e in &ae.elements {
|
|
||||||
if !walk_value::<WalkT>(e, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
Value::ObjectExpression(oe) => walk_object_expression(oe, f),
|
|
||||||
Value::MemberExpression(me) => walk_member_expression(me, f),
|
|
||||||
Value::UnaryExpression(ue) => walk_unary_expression(ue, f),
|
|
||||||
Value::None(_) => Ok(true),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Walk through an [ObjectProperty].
|
|
||||||
fn walk_object_property<'a, WalkT>(node: &'a ObjectProperty, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(node.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
walk_value(&node.value, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Walk through an [ObjectExpression].
|
|
||||||
fn walk_object_expression<'a, WalkT>(node: &'a ObjectExpression, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(node.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
for prop in &node.properties {
|
|
||||||
if !walk_object_property(prop, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// walk through an [UnaryExpression].
|
|
||||||
fn walk_unary_expression<'a, WalkT>(node: &'a UnaryExpression, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
if !f.walk(node.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
walk_binary_part(&node.argument, f)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// walk through a [BodyItem].
|
|
||||||
fn walk_body_item<'a, WalkT>(node: &'a BodyItem, f: &WalkT) -> Result<bool>
|
|
||||||
where
|
|
||||||
WalkT: Walker<'a>,
|
|
||||||
{
|
|
||||||
// We don't walk a BodyItem since it's an enum itself.
|
|
||||||
|
|
||||||
match node {
|
|
||||||
BodyItem::ExpressionStatement(xs) => {
|
|
||||||
if !f.walk(xs.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
walk_value(&xs.expression, f)
|
|
||||||
}
|
|
||||||
BodyItem::VariableDeclaration(vd) => {
|
|
||||||
if !f.walk(vd.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
for dec in &vd.declarations {
|
|
||||||
if !walk_variable_declarator(dec, f)? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(true)
|
|
||||||
}
|
|
||||||
BodyItem::ReturnStatement(rs) => {
|
|
||||||
if !f.walk(rs.into())? {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
walk_value(&rs.argument, f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Lex and parse a KCL source string into a Program AST, panicking on
    /// any lexer or parser failure.
    macro_rules! kcl {
        ( $kcl:expr ) => {{
            let tokens = $crate::token::lexer($kcl).unwrap();
            let parser = $crate::parser::Parser::new(tokens);
            parser.ast().unwrap()
        }};
    }

    #[test]
    fn stop_walking() {
        let program = kcl!(
            "
const foo = 1
const bar = 2
"
        );

        // Returning Ok(false) on `foo` must stop the traversal before the
        // declarator for `bar` is ever visited.
        walk(&program, &|node| match node {
            Node::VariableDeclarator(vd) if vd.id.name == "foo" => Ok(false),
            Node::VariableDeclarator(_) => panic!("walk didn't stop"),
            _ => Ok(true),
        })
        .unwrap();
    }
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user