Compare commits
15 Commits
refactor-l
...
remove-the
Author | SHA1 | Date | |
---|---|---|---|
f46edcddf3 | |||
68fd921a64 | |||
a20e710e8f | |||
9daf2d7794 | |||
f86473d13b | |||
6fccc68c18 | |||
ade66d0876 | |||
b5f3a067ee | |||
bb9d24f821 | |||
bd3cd97d74 | |||
1b5839a7f8 | |||
a9e480f0ed | |||
63fa04608c | |||
0d4d7fa751 | |||
68cdb68231 |
4
.github/workflows/cargo-bench.yml
vendored
4
.github/workflows/cargo-bench.yml
vendored
@ -38,5 +38,7 @@ jobs:
|
||||
- name: Benchmark kcl library
|
||||
shell: bash
|
||||
run: |-
|
||||
cd src/wasm-lib/kcl; cargo bench -- iai
|
||||
cd src/wasm-lib/kcl; cargo bench --all-features -- iai
|
||||
env:
|
||||
KITTYCAD_API_TOKEN: ${{secrets.KITTYCAD_API_TOKEN}}
|
||||
|
||||
|
27
.github/workflows/playwright.yml
vendored
27
.github/workflows/playwright.yml
vendored
@ -38,6 +38,8 @@ jobs:
|
||||
runs-on: ubuntu-latest-8-cores
|
||||
needs: check-rust-changes
|
||||
steps:
|
||||
- name: Tune GitHub-hosted runner network
|
||||
uses: smorimoto/tune-github-hosted-runner-network@v1
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
@ -90,14 +92,17 @@ jobs:
|
||||
- name: build web
|
||||
run: yarn build:local
|
||||
- name: Run ubuntu/chrome snapshots
|
||||
continue-on-error: true
|
||||
run: |
|
||||
yarn playwright test --project="Google Chrome" --update-snapshots e2e/playwright/snapshot-tests.spec.ts
|
||||
# remove test-results, messes with retry logic
|
||||
rm -r test-results
|
||||
env:
|
||||
CI: true
|
||||
token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
||||
snapshottoken: ${{ secrets.KITTYCAD_API_TOKEN }}
|
||||
- name: Clean up test-results
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
run: rm -r test-results
|
||||
- name: check for changes
|
||||
id: git-check
|
||||
run: |
|
||||
@ -124,7 +129,7 @@ jobs:
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: steps.git-check.outputs.modified == 'true'
|
||||
with:
|
||||
name: playwright-report-ubuntu
|
||||
name: playwright-report-ubuntu-${{ github.sha }}
|
||||
path: playwright-report/
|
||||
retention-days: 30
|
||||
# if have previous run results, use them
|
||||
@ -132,7 +137,7 @@ jobs:
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: test-results-ubuntu
|
||||
name: test-results-ubuntu-${{ github.sha }}
|
||||
path: test-results/
|
||||
- name: Run ubuntu/chrome flow retry failures
|
||||
id: retry
|
||||
@ -158,23 +163,25 @@ jobs:
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: test-results-ubuntu
|
||||
name: test-results-ubuntu-${{ github.sha }}
|
||||
path: test-results/
|
||||
retention-days: 30
|
||||
overwrite: true
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: playwright-report-ubuntu
|
||||
name: playwright-report-ubuntu-${{ github.sha }}
|
||||
path: playwright-report/
|
||||
retention-days: 30
|
||||
overwrite: true
|
||||
|
||||
playwright-macos:
|
||||
timeout-minutes: 60
|
||||
runs-on: macos-14
|
||||
runs-on: macos-14-large
|
||||
needs: check-rust-changes
|
||||
steps:
|
||||
- name: Tune GitHub-hosted runner network
|
||||
uses: smorimoto/tune-github-hosted-runner-network@v1
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
@ -232,7 +239,7 @@ jobs:
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
with:
|
||||
name: test-results-macos
|
||||
name: test-results-macos-${{ github.sha }}
|
||||
path: test-results/
|
||||
- name: Run macos/safari flow retry failures
|
||||
id: retry
|
||||
@ -260,14 +267,14 @@ jobs:
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: test-results-macos
|
||||
name: test-results-macos-${{ github.sha }}
|
||||
path: test-results/
|
||||
retention-days: 30
|
||||
overwrite: true
|
||||
- uses: actions/upload-artifact@v4
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: playwright-report-macos
|
||||
name: playwright-report-macos-${{ github.sha }}
|
||||
path: playwright-report/
|
||||
retention-days: 30
|
||||
overwrite: true
|
||||
|
@ -55,6 +55,7 @@ layout: manual
|
||||
* [`patternCircular3d`](kcl/patternCircular3d)
|
||||
* [`patternLinear2d`](kcl/patternLinear2d)
|
||||
* [`patternLinear3d`](kcl/patternLinear3d)
|
||||
* [`patternTransform`](kcl/patternTransform)
|
||||
* [`pi`](kcl/pi)
|
||||
* [`pow`](kcl/pow)
|
||||
* [`profileStart`](kcl/profileStart)
|
||||
|
356
docs/kcl/patternTransform.md
Normal file
356
docs/kcl/patternTransform.md
Normal file
File diff suppressed because one or more lines are too long
4230
docs/kcl/std.json
4230
docs/kcl/std.json
File diff suppressed because it is too large
Load Diff
@ -1214,12 +1214,18 @@ test('Auto complete works', async ({ page }) => {
|
||||
await page.waitForTimeout(100)
|
||||
// press arrow down twice then enter to accept xLine
|
||||
await page.keyboard.press('ArrowDown')
|
||||
await page.waitForTimeout(100)
|
||||
await page.keyboard.press('ArrowDown')
|
||||
await page.waitForTimeout(100)
|
||||
await page.keyboard.press('Enter')
|
||||
await page.waitForTimeout(100)
|
||||
// finish line with comment
|
||||
await page.keyboard.type('5')
|
||||
await page.waitForTimeout(100)
|
||||
await page.keyboard.press('Tab')
|
||||
await page.waitForTimeout(100)
|
||||
await page.keyboard.press('Tab')
|
||||
await page.waitForTimeout(100)
|
||||
await page.keyboard.type(' // lin')
|
||||
await page.waitForTimeout(100)
|
||||
// there shouldn't be any auto complete options for 'lin' in the comment
|
||||
@ -1689,6 +1695,7 @@ test.describe('Onboarding tests', () => {
|
||||
})
|
||||
|
||||
test.describe('Testing selections', () => {
|
||||
test.setTimeout(90_000)
|
||||
test('Selections work on fresh and edited sketch', async ({ page }) => {
|
||||
// tests mapping works on fresh sketch and edited sketch
|
||||
// tests using hovers which is the same as selections, because if
|
||||
@ -1894,6 +1901,239 @@ test.describe('Testing selections', () => {
|
||||
await selectionSequence()
|
||||
})
|
||||
|
||||
test('Solids should be select and deletable', async ({ page }) => {
|
||||
test.setTimeout(90_000)
|
||||
const u = await getUtils(page)
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
'persistCode',
|
||||
`const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([-79.26, 95.04], %)
|
||||
|> line([112.54, 127.64], %, $seg02)
|
||||
|> line([170.36, -121.61], %, $seg01)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(50, sketch001)
|
||||
const sketch005 = startSketchOn(extrude001, 'END')
|
||||
|> startProfileAt([23.24, 136.52], %)
|
||||
|> line([-8.44, 36.61], %)
|
||||
|> line([49.4, 2.05], %)
|
||||
|> line([29.69, -46.95], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const sketch003 = startSketchOn(extrude001, seg01)
|
||||
|> startProfileAt([21.23, 17.81], %)
|
||||
|> line([51.97, 21.32], %)
|
||||
|> line([4.07, -22.75], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const sketch002 = startSketchOn(extrude001, seg02)
|
||||
|> startProfileAt([-100.54, 16.99], %)
|
||||
|> line([0, 20.03], %)
|
||||
|> line([62.61, 0], %, $seg03)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude002 = extrude(50, sketch002)
|
||||
const sketch004 = startSketchOn(extrude002, seg03)
|
||||
|> startProfileAt([57.07, 134.77], %)
|
||||
|> line([-4.72, 22.84], %)
|
||||
|> line([28.8, 6.71], %)
|
||||
|> line([9.19, -25.33], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude003 = extrude(20, sketch004)
|
||||
const pipeLength = 40
|
||||
const pipeSmallDia = 10
|
||||
const pipeLargeDia = 20
|
||||
const thickness = 0.5
|
||||
const part009 = startSketchOn('XY')
|
||||
|> startProfileAt([pipeLargeDia - (thickness / 2), 38], %)
|
||||
|> line([thickness, 0], %)
|
||||
|> line([0, -1], %)
|
||||
|> angledLineToX({
|
||||
angle: 60,
|
||||
to: pipeSmallDia + thickness
|
||||
}, %)
|
||||
|> line([0, -pipeLength], %)
|
||||
|> angledLineToX({
|
||||
angle: -60,
|
||||
to: pipeLargeDia + thickness
|
||||
}, %)
|
||||
|> line([0, -1], %)
|
||||
|> line([-thickness, 0], %)
|
||||
|> line([0, 1], %)
|
||||
|> angledLineToX({ angle: 120, to: pipeSmallDia }, %)
|
||||
|> line([0, pipeLength], %)
|
||||
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
||||
|> close(%)
|
||||
const rev = revolve({ axis: 'y' }, part009)
|
||||
`
|
||||
)
|
||||
}, KCL_DEFAULT_LENGTH)
|
||||
await page.setViewportSize({ width: 1000, height: 500 })
|
||||
await page.goto('/')
|
||||
await u.waitForAuthSkipAppStart()
|
||||
|
||||
await u.openDebugPanel()
|
||||
await u.expectCmdLog('[data-message-type="execution-done"]')
|
||||
await u.closeDebugPanel()
|
||||
|
||||
await u.openAndClearDebugPanel()
|
||||
await u.sendCustomCmd({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'default_camera_look_at',
|
||||
vantage: { x: 1139.49, y: -7053, z: 8597.31 },
|
||||
center: { x: -2206.68, y: -1298.36, z: 60 },
|
||||
up: { x: 0, y: 0, z: 1 },
|
||||
},
|
||||
})
|
||||
await page.waitForTimeout(100)
|
||||
await u.sendCustomCmd({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'default_camera_get_settings',
|
||||
},
|
||||
})
|
||||
await page.waitForTimeout(100)
|
||||
|
||||
const revolve = { x: 646, y: 248 }
|
||||
const parentExtrude = { x: 915, y: 133 }
|
||||
const solid2d = { x: 770, y: 167 }
|
||||
|
||||
// DELETE REVOLVE
|
||||
await page.mouse.click(revolve.x, revolve.y)
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
||||
'|> line([0, -pipeLength], %)'
|
||||
)
|
||||
await u.clearCommandLogs()
|
||||
await page.keyboard.press('Backspace')
|
||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
||||
await page.waitForTimeout(200)
|
||||
|
||||
await expect(u.codeLocator).not.toContainText(
|
||||
`const rev = revolve({ axis: 'y' }, part009)`
|
||||
)
|
||||
|
||||
// DELETE PARENT EXTRUDE
|
||||
await page.mouse.click(parentExtrude.x, parentExtrude.y)
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
||||
'|> line([170.36, -121.61], %, $seg01)'
|
||||
)
|
||||
await u.clearCommandLogs()
|
||||
await page.keyboard.press('Backspace')
|
||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
||||
await page.waitForTimeout(200)
|
||||
await expect(u.codeLocator).not.toContainText(
|
||||
`const extrude001 = extrude(50, sketch001)`
|
||||
)
|
||||
await expect(u.codeLocator).toContainText(`const sketch005 = startSketchOn({
|
||||
plane: {
|
||||
origin: { x: 0, y: -50, z: 0 },
|
||||
x_axis: { x: 1, y: 0, z: 0 },
|
||||
y_axis: { x: 0, y: 0, z: 1 },
|
||||
z_axis: { x: 0, y: -1, z: 0 }
|
||||
}
|
||||
})`)
|
||||
await expect(u.codeLocator).toContainText(`const sketch003 = startSketchOn({
|
||||
plane: {
|
||||
origin: { x: 116.53, y: 0, z: 163.25 },
|
||||
x_axis: { x: -0.81, y: 0, z: 0.58 },
|
||||
y_axis: { x: 0, y: -1, z: 0 },
|
||||
z_axis: { x: 0.58, y: 0, z: 0.81 }
|
||||
}
|
||||
})`)
|
||||
await expect(u.codeLocator).toContainText(`const sketch002 = startSketchOn({
|
||||
plane: {
|
||||
origin: { x: -91.74, y: 0, z: 80.89 },
|
||||
x_axis: { x: -0.66, y: 0, z: -0.75 },
|
||||
y_axis: { x: 0, y: -1, z: 0 },
|
||||
z_axis: { x: -0.75, y: 0, z: 0.66 }
|
||||
}
|
||||
})`)
|
||||
|
||||
// DELETE SOLID 2D
|
||||
await page.mouse.click(solid2d.x, solid2d.y)
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
||||
'|> startProfileAt([23.24, 136.52], %)'
|
||||
)
|
||||
await u.clearCommandLogs()
|
||||
await page.keyboard.press('Backspace')
|
||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
||||
await page.waitForTimeout(200)
|
||||
await expect(u.codeLocator).not.toContainText(
|
||||
`const sketch005 = startSketchOn({`
|
||||
)
|
||||
})
|
||||
test("Deleting solid that the AST mod can't handle results in a toast message", async ({
|
||||
page,
|
||||
}) => {
|
||||
const u = await getUtils(page)
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
'persistCode',
|
||||
`const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([-79.26, 95.04], %)
|
||||
|> line([112.54, 127.64], %, $seg02)
|
||||
|> line([170.36, -121.61], %, $seg01)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(50, sketch001)
|
||||
const launderExtrudeThroughVar = extrude001
|
||||
const sketch002 = startSketchOn(launderExtrudeThroughVar, seg02)
|
||||
|> startProfileAt([-100.54, 16.99], %)
|
||||
|> line([0, 20.03], %)
|
||||
|> line([62.61, 0], %, $seg03)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
`
|
||||
)
|
||||
}, KCL_DEFAULT_LENGTH)
|
||||
await page.setViewportSize({ width: 1000, height: 500 })
|
||||
await page.goto('/')
|
||||
await u.waitForAuthSkipAppStart()
|
||||
|
||||
await u.openDebugPanel()
|
||||
await u.expectCmdLog('[data-message-type="execution-done"]', 10_000)
|
||||
await u.closeDebugPanel()
|
||||
|
||||
await u.openAndClearDebugPanel()
|
||||
await u.sendCustomCmd({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'default_camera_look_at',
|
||||
vantage: { x: 1139.49, y: -7053, z: 8597.31 },
|
||||
center: { x: -2206.68, y: -1298.36, z: 60 },
|
||||
up: { x: 0, y: 0, z: 1 },
|
||||
},
|
||||
})
|
||||
await page.waitForTimeout(100)
|
||||
await u.sendCustomCmd({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'default_camera_get_settings',
|
||||
},
|
||||
})
|
||||
await page.waitForTimeout(100)
|
||||
|
||||
// attempt delete
|
||||
await page.mouse.click(930, 139)
|
||||
await page.waitForTimeout(100)
|
||||
await expect(page.locator('.cm-activeLine')).toHaveText(
|
||||
'|> line([170.36, -121.61], %, $seg01)'
|
||||
)
|
||||
await u.clearCommandLogs()
|
||||
await page.keyboard.press('Backspace')
|
||||
|
||||
await expect(page.getByText('Unable to delete part')).toBeVisible()
|
||||
})
|
||||
test('Hovering over 3d features highlights code', async ({ page }) => {
|
||||
const u = await getUtils(page)
|
||||
await page.addInitScript(async (KCL_DEFAULT_LENGTH) => {
|
||||
@ -2121,6 +2361,104 @@ const part001 = startSketchOn('XZ')
|
||||
)
|
||||
}
|
||||
})
|
||||
test("Hovering and selection of extruded faces works, and is not overridden shortly after user's click", async ({
|
||||
page,
|
||||
}) => {
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
'persistCode',
|
||||
`const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([-79.26, 95.04], %)
|
||||
|> line([112.54, 127.64], %)
|
||||
|> line([170.36, -121.61], %, $seg01)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(50, sketch001)
|
||||
`
|
||||
)
|
||||
})
|
||||
const u = await getUtils(page)
|
||||
await page.setViewportSize({ width: 1200, height: 500 })
|
||||
await page.goto('/')
|
||||
await u.waitForAuthSkipAppStart()
|
||||
await u.openAndClearDebugPanel()
|
||||
|
||||
await u.sendCustomCmd({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
cmd: {
|
||||
type: 'default_camera_look_at',
|
||||
vantage: { x: 6615, y: -9505, z: 10344 },
|
||||
center: { x: 1579, y: -635, z: 4035 },
|
||||
up: { x: 0, y: 0, z: 1 },
|
||||
},
|
||||
})
|
||||
await u.waitForCmdReceive('default_camera_look_at')
|
||||
await u.clearAndCloseDebugPanel()
|
||||
|
||||
await page.waitForTimeout(1000)
|
||||
|
||||
const isMac = process.platform === 'darwin'
|
||||
|
||||
let noHoverColor: [number, number, number] = [82, 82, 82]
|
||||
let hoverColor: [number, number, number] = [116, 116, 116]
|
||||
let selectColor: [number, number, number] = [144, 148, 97]
|
||||
|
||||
const extrudeWall = { x: 670, y: 275 }
|
||||
const extrudeText = `line([170.36, -121.61], %, $seg01)`
|
||||
|
||||
const cap = { x: 594, y: 283 }
|
||||
const capText = `startProfileAt([-79.26, 95.04], %)`
|
||||
|
||||
const nothing = { x: 946, y: 229 }
|
||||
|
||||
expect(await u.getGreatestPixDiff(extrudeWall, noHoverColor)).toBeLessThan(
|
||||
5
|
||||
)
|
||||
await page.mouse.move(nothing.x, nothing.y)
|
||||
await page.waitForTimeout(100)
|
||||
await page.mouse.move(extrudeWall.x, extrudeWall.y)
|
||||
await expect(page.getByTestId('hover-highlight')).toBeVisible()
|
||||
await expect(page.getByTestId('hover-highlight')).toContainText(extrudeText)
|
||||
await page.waitForTimeout(200)
|
||||
await expect(
|
||||
await u.getGreatestPixDiff(extrudeWall, hoverColor)
|
||||
).toBeLessThan(5)
|
||||
await page.mouse.click(extrudeWall.x, extrudeWall.y)
|
||||
await expect(page.locator('.cm-activeLine')).toHaveText(`|> ${extrudeText}`)
|
||||
await page.waitForTimeout(200)
|
||||
await expect(
|
||||
await u.getGreatestPixDiff(extrudeWall, selectColor)
|
||||
).toBeLessThan(5)
|
||||
await page.waitForTimeout(1000)
|
||||
// check color stays there, i.e. not overridden (this was a bug previously)
|
||||
await expect(
|
||||
await u.getGreatestPixDiff(extrudeWall, selectColor)
|
||||
).toBeLessThan(5)
|
||||
|
||||
await page.mouse.move(nothing.x, nothing.y)
|
||||
await page.waitForTimeout(300)
|
||||
await expect(page.getByTestId('hover-highlight')).not.toBeVisible()
|
||||
|
||||
// because of shading, color is not exact everywhere on the face
|
||||
noHoverColor = [104, 104, 104]
|
||||
hoverColor = [134, 134, 134]
|
||||
selectColor = [158, 162, 110]
|
||||
|
||||
await expect(await u.getGreatestPixDiff(cap, noHoverColor)).toBeLessThan(5)
|
||||
await page.mouse.move(cap.x, cap.y)
|
||||
await expect(page.getByTestId('hover-highlight')).toBeVisible()
|
||||
await expect(page.getByTestId('hover-highlight')).toContainText(capText)
|
||||
await page.waitForTimeout(200)
|
||||
await expect(await u.getGreatestPixDiff(cap, hoverColor)).toBeLessThan(5)
|
||||
await page.mouse.click(cap.x, cap.y)
|
||||
await expect(page.locator('.cm-activeLine')).toHaveText(`|> ${capText}`)
|
||||
await page.waitForTimeout(200)
|
||||
await expect(await u.getGreatestPixDiff(cap, selectColor)).toBeLessThan(5)
|
||||
await page.waitForTimeout(1000)
|
||||
// check color stays there, i.e. not overridden (this was a bug previously)
|
||||
await expect(await u.getGreatestPixDiff(cap, selectColor)).toBeLessThan(5)
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Command bar tests', () => {
|
||||
@ -2139,10 +2477,10 @@ test.describe('Command bar tests', () => {
|
||||
.or(page.getByRole('button', { name: '⌘K' }))
|
||||
.click()
|
||||
|
||||
let cmdSearchBar = await page.getByPlaceholder('Search commands')
|
||||
let cmdSearchBar = page.getByPlaceholder('Search commands')
|
||||
await expect(cmdSearchBar).toBeVisible()
|
||||
await page.keyboard.press('Escape')
|
||||
cmdSearchBar = await page.getByPlaceholder('Search commands')
|
||||
cmdSearchBar = page.getByPlaceholder('Search commands')
|
||||
await expect(cmdSearchBar).not.toBeVisible()
|
||||
|
||||
// Now try the same, but with the keyboard shortcut, check focus
|
||||
@ -2151,7 +2489,7 @@ test.describe('Command bar tests', () => {
|
||||
} else {
|
||||
await page.locator('html').press('Control+C')
|
||||
}
|
||||
cmdSearchBar = await page.getByPlaceholder('Search commands')
|
||||
cmdSearchBar = page.getByPlaceholder('Search commands')
|
||||
await expect(cmdSearchBar).toBeVisible()
|
||||
await expect(cmdSearchBar).toBeFocused()
|
||||
|
||||
@ -2532,9 +2870,6 @@ fn yohey = (pos) => {
|
||||
|
||||
await page.getByText(selectionsSnippets.extrudeAndEditBlocked).click()
|
||||
await expect(page.getByRole('button', { name: 'Extrude' })).toBeDisabled()
|
||||
await expect(
|
||||
page.getByRole('button', { name: 'Edit Sketch' })
|
||||
).not.toBeVisible()
|
||||
|
||||
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
|
||||
await expect(page.getByRole('button', { name: 'Extrude' })).not.toBeDisabled()
|
||||
@ -2559,10 +2894,14 @@ fn yohey = (pos) => {
|
||||
// selecting an editable sketch but clicking "start sketch" should start a new sketch and not edit the existing one
|
||||
await page.getByText(selectionsSnippets.extrudeAndEditAllowed).click()
|
||||
await page.getByRole('button', { name: 'Start Sketch' }).click()
|
||||
await page.waitForTimeout(200)
|
||||
await page.getByTestId('KCL Code').click()
|
||||
await page.waitForTimeout(200)
|
||||
await page.mouse.click(734, 134)
|
||||
await page.waitForTimeout(100)
|
||||
await page.getByTestId('KCL Code').click()
|
||||
// expect main content to contain `sketch005` i.e. started a new sketch
|
||||
await page.waitForTimeout(300)
|
||||
await expect(page.locator('.cm-content')).toHaveText(
|
||||
/sketch001 = startSketchOn\('XZ'\)/
|
||||
)
|
||||
@ -2846,7 +3185,7 @@ async function doEditSegmentsByDraggingHandle(page: Page, openPanes: string[]) {
|
||||
}
|
||||
|
||||
test.describe('Can edit segments by dragging their handles', () => {
|
||||
test('code pane open at start', async ({ page }) => {
|
||||
test('code pane open at start-handles', async ({ page }) => {
|
||||
// Load the app with the code panes
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
@ -2862,7 +3201,7 @@ test.describe('Can edit segments by dragging their handles', () => {
|
||||
await doEditSegmentsByDraggingHandle(page, ['code'])
|
||||
})
|
||||
|
||||
test('code pane closed at start', async ({ page }) => {
|
||||
test('code pane closed at start-handles', async ({ page }) => {
|
||||
// Load the app with the code panes
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
@ -3180,6 +3519,7 @@ test.describe('Snap to close works (at any scale)', () => {
|
||||
})
|
||||
|
||||
test('Sketch on face', async ({ page }) => {
|
||||
test.setTimeout(90_000)
|
||||
const u = await getUtils(page)
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
@ -5373,6 +5713,7 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
||||
)
|
||||
|
||||
await page.getByTestId('overlay-menu').click()
|
||||
await page.waitForTimeout(100)
|
||||
await page.getByText('Delete Segment').click()
|
||||
|
||||
await page.getByText('Cancel').click()
|
||||
@ -5385,6 +5726,7 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
||||
)
|
||||
|
||||
await page.getByTestId('overlay-menu').click()
|
||||
await page.waitForTimeout(100)
|
||||
await page.getByText('Delete Segment').click()
|
||||
|
||||
await page.getByText('Continue and unconstrain').last().click()
|
||||
@ -5533,6 +5875,7 @@ ${extraLine ? 'const myVar = segLen(seg01, part001)' : ''}`
|
||||
await expect(page.locator('.cm-content')).toContainText(before)
|
||||
|
||||
await page.getByTestId('overlay-menu').click()
|
||||
await page.waitForTimeout(100)
|
||||
await page.getByText('Remove constraints').click()
|
||||
|
||||
await expect(page.locator('.cm-content')).toContainText(after)
|
||||
@ -5706,8 +6049,8 @@ test('Basic default modeling and sketch hotkeys work', async ({ page }) => {
|
||||
await expect(extrudeButton).not.toBeDisabled()
|
||||
await page.keyboard.press('e')
|
||||
await page.waitForTimeout(100)
|
||||
await page.mouse.move(900, 200, { steps: 5 })
|
||||
await page.mouse.click(900, 200)
|
||||
await page.mouse.move(800, 200, { steps: 5 })
|
||||
await page.mouse.click(800, 200)
|
||||
await page.waitForTimeout(100)
|
||||
await page.getByRole('button', { name: 'Continue' }).click()
|
||||
await page.getByRole('button', { name: 'Submit command' }).click()
|
||||
|
@ -45,8 +45,8 @@ async function clearCommandLogs(page: Page) {
|
||||
await page.getByTestId('clear-commands').click()
|
||||
}
|
||||
|
||||
async function expectCmdLog(page: Page, locatorStr: string) {
|
||||
await expect(page.locator(locatorStr).last()).toBeVisible()
|
||||
async function expectCmdLog(page: Page, locatorStr: string, timeout = 5000) {
|
||||
await expect(page.locator(locatorStr).last()).toBeVisible({ timeout })
|
||||
}
|
||||
|
||||
async function waitForDefaultPlanesToBeVisible(page: Page) {
|
||||
@ -228,7 +228,8 @@ export async function getUtils(page: Page) {
|
||||
await fillInput('z', xyz[2])
|
||||
},
|
||||
clearCommandLogs: () => clearCommandLogs(page),
|
||||
expectCmdLog: (locatorStr: string) => expectCmdLog(page, locatorStr),
|
||||
expectCmdLog: (locatorStr: string, timeout = 5000) =>
|
||||
expectCmdLog(page, locatorStr, timeout),
|
||||
openKclCodePanel: () => openKclCodePanel(page),
|
||||
closeKclCodePanel: () => closeKclCodePanel(page),
|
||||
openDebugPanel: () => openDebugPanel(page),
|
||||
@ -300,11 +301,19 @@ export async function getUtils(page: Page) {
|
||||
(screenshot.width * coords.y * pixMultiplier +
|
||||
coords.x * pixMultiplier) *
|
||||
4 // rbga is 4 channels
|
||||
return Math.max(
|
||||
const maxDiff = Math.max(
|
||||
Math.abs(screenshot.data[index] - expected[0]),
|
||||
Math.abs(screenshot.data[index + 1] - expected[1]),
|
||||
Math.abs(screenshot.data[index + 2] - expected[2])
|
||||
)
|
||||
if (maxDiff > 4) {
|
||||
console.log(
|
||||
`Expected: ${expected} Actual: [${screenshot.data[index]}, ${
|
||||
screenshot.data[index + 1]
|
||||
}, ${screenshot.data[index + 2]}]`
|
||||
)
|
||||
}
|
||||
return maxDiff
|
||||
},
|
||||
doAndWaitForImageDiff: (fn: () => Promise<any>, diffCount = 200) =>
|
||||
new Promise(async (resolve) => {
|
||||
|
@ -12,6 +12,7 @@
|
||||
"@headlessui/tailwindcss": "^0.2.0",
|
||||
"@kittycad/lib": "^0.0.67",
|
||||
"@lezer/javascript": "^1.4.9",
|
||||
"@open-rpc/client-js": "^1.8.1",
|
||||
"@react-hook/resize-observer": "^2.0.1",
|
||||
"@replit/codemirror-interact": "^6.3.1",
|
||||
"@tauri-apps/api": "2.0.0-beta.12",
|
||||
@ -41,7 +42,7 @@
|
||||
"fuse.js": "^7.0.0",
|
||||
"html2canvas-pro": "^1.4.3",
|
||||
"http-server": "^14.1.1",
|
||||
"json-rpc-2.0": "^1.7.0",
|
||||
"json-rpc-2.0": "^1.6.0",
|
||||
"jszip": "^3.10.1",
|
||||
"node-fetch": "^3.3.2",
|
||||
"re-resizable": "^6.9.11",
|
||||
@ -61,8 +62,7 @@
|
||||
"ua-parser-js": "^1.0.37",
|
||||
"uuid": "^9.0.1",
|
||||
"vitest": "^1.6.0",
|
||||
"vscode-languageclient": "^9.0.1",
|
||||
"vscode-languageserver": "^9.0.1",
|
||||
"vscode-jsonrpc": "^8.2.1",
|
||||
"vscode-languageserver-protocol": "^3.17.5",
|
||||
"wasm-pack": "^0.12.1",
|
||||
"web-vitals": "^3.5.2",
|
||||
@ -89,7 +89,7 @@
|
||||
"fmt-check": "prettier --check ./src *.ts *.json *.js ./e2e",
|
||||
"fetch:wasm": "./get-latest-wasm-bundle.sh",
|
||||
"build:wasm-dev": "(cd src/wasm-lib && wasm-pack build --dev --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
||||
"build:wasm": "(cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
||||
"build:wasm": "(cd src/wasm-lib && wasm-pack build --release --target web --out-dir pkg && cargo test -p kcl-lib export_bindings) && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn fmt",
|
||||
"build:wasm-clean": "yarn wasm-prep && yarn build:wasm",
|
||||
"remove-importmeta": "sed -i 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\"; sed -i '' 's/import.meta.url/window.location.origin/g' \"./src/wasm-lib/pkg/wasm_lib.js\" || echo \"sed for both mac and linux\"",
|
||||
"wasm-prep": "rm -rf src/wasm-lib/pkg && mkdir src/wasm-lib/pkg && rm -rf src/wasm-lib/kcl/bindings",
|
||||
|
4
src-tauri/Cargo.lock
generated
4
src-tauri/Cargo.lock
generated
@ -4546,9 +4546,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.116"
|
||||
version = "1.0.118"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813"
|
||||
checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4"
|
||||
dependencies = [
|
||||
"indexmap 2.2.6",
|
||||
"itoa 1.0.11",
|
||||
|
@ -25,6 +25,7 @@ import { LowerRightControls } from 'components/LowerRightControls'
|
||||
import ModalContainer from 'react-modal-promise'
|
||||
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
||||
import Gizmo from 'components/Gizmo'
|
||||
import { CoreDumpManager } from 'lib/coredump'
|
||||
|
||||
export function App() {
|
||||
useRefreshSettings(paths.FILE + 'SETTINGS')
|
||||
@ -55,7 +56,11 @@ export function App() {
|
||||
setHtmlRef(ref)
|
||||
}, [ref])
|
||||
|
||||
const { settings } = useSettingsAuthContext()
|
||||
const { auth, settings } = useSettingsAuthContext()
|
||||
const token = auth?.context?.token
|
||||
|
||||
const coreDumpManager = new CoreDumpManager(engineCommandManager, ref, token)
|
||||
|
||||
const {
|
||||
app: { onboardingStatus },
|
||||
} = settings.context
|
||||
@ -129,7 +134,7 @@ export function App() {
|
||||
<ModelingSidebar paneOpacity={paneOpacity} />
|
||||
<Stream />
|
||||
{/* <CamToggle /> */}
|
||||
<LowerRightControls>
|
||||
<LowerRightControls coreDumpManager={coreDumpManager}>
|
||||
<Gizmo />
|
||||
</LowerRightControls>
|
||||
</div>
|
||||
|
@ -534,7 +534,7 @@ export class SceneEntities {
|
||||
segmentName: 'line' | 'tangentialArcTo' = 'line',
|
||||
shouldTearDown = true
|
||||
) => {
|
||||
const _ast = JSON.parse(JSON.stringify(kclManager.ast))
|
||||
const _ast = kclManager.ast
|
||||
|
||||
const _node1 = getNodeFromPath<VariableDeclaration>(
|
||||
_ast,
|
||||
@ -692,7 +692,7 @@ export class SceneEntities {
|
||||
sketchOrigin: [number, number, number],
|
||||
rectangleOrigin: [x: number, y: number]
|
||||
) => {
|
||||
let _ast = JSON.parse(JSON.stringify(kclManager.ast))
|
||||
let _ast = kclManager.ast
|
||||
|
||||
const _node1 = getNodeFromPath<VariableDeclaration>(
|
||||
_ast,
|
||||
@ -723,7 +723,9 @@ export class SceneEntities {
|
||||
...getRectangleCallExpressions(rectangleOrigin, tags),
|
||||
])
|
||||
|
||||
_ast = parse(recast(_ast))
|
||||
let result = parse(recast(_ast))
|
||||
if (trap(result)) return Promise.reject(result)
|
||||
_ast = result
|
||||
|
||||
const { programMemoryOverride, truncatedAst } = await this.setupSketch({
|
||||
sketchPathToNode,
|
||||
@ -737,7 +739,7 @@ export class SceneEntities {
|
||||
sceneInfra.setCallbacks({
|
||||
onMove: async (args) => {
|
||||
// Update the width and height of the draft rectangle
|
||||
const pathToNodeTwo = JSON.parse(JSON.stringify(sketchPathToNode))
|
||||
const pathToNodeTwo = sketchPathToNode
|
||||
pathToNodeTwo[1][0] = 0
|
||||
|
||||
const _node = getNodeFromPath<VariableDeclaration>(
|
||||
@ -799,7 +801,9 @@ export class SceneEntities {
|
||||
if (sketchInit.type === 'PipeExpression') {
|
||||
updateRectangleSketch(sketchInit, x, y, tags[0])
|
||||
|
||||
_ast = parse(recast(_ast))
|
||||
let result = parse(recast(_ast))
|
||||
if (trap(result)) return Promise.reject(result)
|
||||
_ast = result
|
||||
|
||||
// Update the primary AST and unequip the rectangle tool
|
||||
await kclManager.executeAstMock(_ast)
|
||||
@ -1003,10 +1007,8 @@ export class SceneEntities {
|
||||
PROFILE_START,
|
||||
])
|
||||
if (!group) return
|
||||
const pathToNode: PathToNode = JSON.parse(
|
||||
JSON.stringify(group.userData.pathToNode)
|
||||
)
|
||||
const varDecIndex = JSON.parse(JSON.stringify(pathToNode[1][0]))
|
||||
const pathToNode: PathToNode = group.userData.pathToNode
|
||||
const varDecIndex: number = pathToNode[1][0] as number
|
||||
if (draftInfo) {
|
||||
pathToNode[1][0] = 0
|
||||
}
|
||||
@ -1719,7 +1721,7 @@ function prepareTruncatedMemoryAndAst(
|
||||
}
|
||||
| Error {
|
||||
const bodyIndex = Number(sketchPathToNode?.[1]?.[0]) || 0
|
||||
const _ast = JSON.parse(JSON.stringify(ast))
|
||||
const _ast = ast
|
||||
|
||||
const _node = getNodeFromPath<VariableDeclaration>(
|
||||
_ast,
|
||||
@ -1778,7 +1780,7 @@ function prepareTruncatedMemoryAndAst(
|
||||
}
|
||||
const truncatedAst: Program = {
|
||||
..._ast,
|
||||
body: [JSON.parse(JSON.stringify(_ast.body[bodyIndex]))],
|
||||
body: [_ast.body[bodyIndex]],
|
||||
}
|
||||
const programMemoryOverride = programMemoryInit()
|
||||
if (err(programMemoryOverride)) return programMemoryOverride
|
||||
@ -1804,7 +1806,7 @@ function prepareTruncatedMemoryAndAst(
|
||||
}
|
||||
|
||||
if (value.type === 'TagIdentifier') {
|
||||
programMemoryOverride.root[key] = JSON.parse(JSON.stringify(value))
|
||||
programMemoryOverride.root[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
@ -1819,7 +1821,7 @@ function prepareTruncatedMemoryAndAst(
|
||||
if (!memoryItem) {
|
||||
continue
|
||||
}
|
||||
programMemoryOverride.root[name] = JSON.parse(JSON.stringify(memoryItem))
|
||||
programMemoryOverride.root[name] = memoryItem
|
||||
}
|
||||
return {
|
||||
truncatedAst,
|
||||
@ -1967,9 +1969,9 @@ export async function getSketchOrientationDetails(
|
||||
* @param entityId - The ID of the entity for which orientation details are being fetched.
|
||||
* @returns A promise that resolves with the orientation details of the face.
|
||||
*/
|
||||
async function getFaceDetails(
|
||||
export async function getFaceDetails(
|
||||
entityId: string
|
||||
): Promise<Models['FaceIsPlanar_type']> {
|
||||
): Promise<Models['GetSketchModePlane_type']> {
|
||||
// TODO mode engine connection to allow batching returns and batch the following
|
||||
await engineCommandManager.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
@ -1982,8 +1984,7 @@ async function getFaceDetails(
|
||||
entity_id: entityId,
|
||||
},
|
||||
})
|
||||
// TODO change typing to get_sketch_mode_plane once lib is updated
|
||||
const faceInfo: Models['FaceIsPlanar_type'] = (
|
||||
const faceInfo: Models['GetSketchModePlane_type'] = (
|
||||
await engineCommandManager.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd_id: uuidv4(),
|
||||
|
@ -151,9 +151,7 @@ export function useCalc({
|
||||
ast,
|
||||
engineCommandManager,
|
||||
useFakeExecutor: true,
|
||||
programMemoryOverride: JSON.parse(
|
||||
JSON.stringify(kclManager.programMemory)
|
||||
),
|
||||
programMemoryOverride: kclManager.programMemory,
|
||||
}).then(({ programMemory }) => {
|
||||
const resultDeclaration = ast.body.find(
|
||||
(a) =>
|
||||
|
@ -6,8 +6,18 @@ import { NetworkHealthIndicator } from 'components/NetworkHealthIndicator'
|
||||
import { HelpMenu } from './HelpMenu'
|
||||
import { Link, useLocation } from 'react-router-dom'
|
||||
import { useAbsoluteFilePath } from 'hooks/useAbsoluteFilePath'
|
||||
import { coreDump } from 'lang/wasm'
|
||||
import toast from 'react-hot-toast'
|
||||
import { CoreDumpManager } from 'lib/coredump'
|
||||
import openWindow from 'lib/openWindow'
|
||||
|
||||
export function LowerRightControls(props: React.PropsWithChildren) {
|
||||
export function LowerRightControls({
|
||||
children,
|
||||
coreDumpManager,
|
||||
}: {
|
||||
children?: React.ReactNode
|
||||
coreDumpManager?: CoreDumpManager
|
||||
}) {
|
||||
const location = useLocation()
|
||||
const filePath = useAbsoluteFilePath()
|
||||
const linkOverrideClassName =
|
||||
@ -15,9 +25,42 @@ export function LowerRightControls(props: React.PropsWithChildren) {
|
||||
|
||||
const isPlayWright = window?.localStorage.getItem('playwright') === 'true'
|
||||
|
||||
async function reportbug(event: { preventDefault: () => void }) {
|
||||
event?.preventDefault()
|
||||
|
||||
if (!coreDumpManager) {
|
||||
// open default reporting option
|
||||
openWindow('https://github.com/KittyCAD/modeling-app/issues/new/choose')
|
||||
} else {
|
||||
toast
|
||||
.promise(
|
||||
coreDump(coreDumpManager, true),
|
||||
{
|
||||
loading: 'Preparing bug report...',
|
||||
success: 'Bug report opened in new window',
|
||||
error: 'Unable to export a core dump. Using default reporting.',
|
||||
},
|
||||
{
|
||||
success: {
|
||||
// Note: this extended duration is especially important for Playwright e2e testing
|
||||
// default duration is 2000 - https://react-hot-toast.com/docs/toast#default-durations
|
||||
duration: 6000,
|
||||
},
|
||||
}
|
||||
)
|
||||
.catch((err: Error) => {
|
||||
if (err) {
|
||||
openWindow(
|
||||
'https://github.com/KittyCAD/modeling-app/issues/new/choose'
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<section className="fixed bottom-2 right-2 flex flex-col items-end gap-3 pointer-events-none">
|
||||
{props.children}
|
||||
{children}
|
||||
<menu className="flex items-center justify-end gap-3 pointer-events-auto">
|
||||
<a
|
||||
href={`https://github.com/KittyCAD/modeling-app/releases/tag/v${APP_VERSION}`}
|
||||
@ -28,6 +71,7 @@ export function LowerRightControls(props: React.PropsWithChildren) {
|
||||
v{isPlayWright ? '11.22.33' : APP_VERSION}
|
||||
</a>
|
||||
<a
|
||||
onClick={reportbug}
|
||||
href="https://github.com/KittyCAD/modeling-app/issues/new/choose"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
|
@ -7,7 +7,8 @@ import React, {
|
||||
useContext,
|
||||
useState,
|
||||
} from 'react'
|
||||
import LspServerClient from '../editor/plugins/lsp/client'
|
||||
import { FromServer, IntoServer } from 'editor/plugins/lsp/codec'
|
||||
import Client from '../editor/plugins/lsp/client'
|
||||
import { TEST, VITE_KC_API_BASE_URL } from 'env'
|
||||
import kclLanguage from 'editor/plugins/lsp/kcl/language'
|
||||
import { copilotPlugin } from 'editor/plugins/lsp/copilot'
|
||||
@ -18,7 +19,9 @@ import { LanguageSupport } from '@codemirror/language'
|
||||
import { useNavigate } from 'react-router-dom'
|
||||
import { paths } from 'lib/paths'
|
||||
import { FileEntry } from 'lib/types'
|
||||
import Worker from 'editor/plugins/lsp/worker.ts?worker'
|
||||
import {
|
||||
LspWorkerEventType,
|
||||
KclWorkerOptions,
|
||||
CopilotWorkerOptions,
|
||||
LspWorker,
|
||||
@ -27,6 +30,7 @@ import { wasmUrl } from 'lang/wasm'
|
||||
import { PROJECT_ENTRYPOINT } from 'lib/constants'
|
||||
import { useNetworkContext } from 'hooks/useNetworkContext'
|
||||
import { NetworkHealthState } from 'hooks/useNetworkStatus'
|
||||
import { err } from 'lib/trap'
|
||||
|
||||
function getWorkspaceFolders(): LSP.WorkspaceFolder[] {
|
||||
return []
|
||||
@ -103,23 +107,32 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
|
||||
return { lspClient: null }
|
||||
}
|
||||
|
||||
const options: KclWorkerOptions = {
|
||||
const lspWorker = new Worker({ name: 'kcl' })
|
||||
const initEvent: KclWorkerOptions = {
|
||||
wasmUrl: wasmUrl(),
|
||||
token: token,
|
||||
baseUnit: defaultUnit.current,
|
||||
apiBaseUrl: VITE_KC_API_BASE_URL,
|
||||
callback: () => {
|
||||
setIsLspReady(true)
|
||||
},
|
||||
wasmUrl: wasmUrl(),
|
||||
}
|
||||
lspWorker.postMessage({
|
||||
worker: LspWorker.Kcl,
|
||||
eventType: LspWorkerEventType.Init,
|
||||
eventData: initEvent,
|
||||
})
|
||||
lspWorker.onmessage = function (e) {
|
||||
if (err(fromServer)) return
|
||||
fromServer.add(e.data)
|
||||
}
|
||||
|
||||
const lsp = new LspServerClient({ worker: LspWorker.Kcl, options })
|
||||
lsp.startServer()
|
||||
const intoServer: IntoServer = new IntoServer(LspWorker.Kcl, lspWorker)
|
||||
const fromServer: FromServer | Error = FromServer.create()
|
||||
if (err(fromServer)) return { lspClient: null }
|
||||
|
||||
const lspClient = new LanguageServerClient({
|
||||
client: lsp,
|
||||
name: LspWorker.Kcl,
|
||||
})
|
||||
const client = new Client(fromServer, intoServer)
|
||||
|
||||
setIsLspReady(true)
|
||||
|
||||
const lspClient = new LanguageServerClient({ client, name: LspWorker.Kcl })
|
||||
return { lspClient }
|
||||
}, [
|
||||
// We need a token for authenticating the server.
|
||||
@ -172,19 +185,32 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
|
||||
return { lspClient: null }
|
||||
}
|
||||
|
||||
const options: CopilotWorkerOptions = {
|
||||
const lspWorker = new Worker({ name: 'copilot' })
|
||||
const initEvent: CopilotWorkerOptions = {
|
||||
wasmUrl: wasmUrl(),
|
||||
token: token,
|
||||
apiBaseUrl: VITE_KC_API_BASE_URL,
|
||||
callback: () => {
|
||||
setIsCopilotReady(true)
|
||||
},
|
||||
wasmUrl: wasmUrl(),
|
||||
}
|
||||
const lsp = new LspServerClient({ worker: LspWorker.Copilot, options })
|
||||
lsp.startServer()
|
||||
lspWorker.postMessage({
|
||||
worker: LspWorker.Copilot,
|
||||
eventType: LspWorkerEventType.Init,
|
||||
eventData: initEvent,
|
||||
})
|
||||
lspWorker.onmessage = function (e) {
|
||||
if (err(fromServer)) return
|
||||
fromServer.add(e.data)
|
||||
}
|
||||
|
||||
const intoServer: IntoServer = new IntoServer(LspWorker.Copilot, lspWorker)
|
||||
const fromServer: FromServer | Error = FromServer.create()
|
||||
if (err(fromServer)) return { lspClient: null }
|
||||
|
||||
const client = new Client(fromServer, intoServer)
|
||||
|
||||
setIsCopilotReady(true)
|
||||
|
||||
const lspClient = new LanguageServerClient({
|
||||
client: lsp,
|
||||
client,
|
||||
name: LspWorker.Copilot,
|
||||
})
|
||||
return { lspClient }
|
||||
|
@ -23,6 +23,7 @@ import {
|
||||
editorManager,
|
||||
sceneEntitiesManager,
|
||||
} from 'lib/singletons'
|
||||
import { useHotkeys } from 'react-hotkeys-hook'
|
||||
import { applyConstraintHorzVertDistance } from './Toolbar/SetHorzVertDistance'
|
||||
import {
|
||||
angleBetweenInfo,
|
||||
@ -78,6 +79,7 @@ import { getVarNameModal } from 'hooks/useToolbarGuards'
|
||||
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
||||
import { uuidv4 } from 'lib/utils'
|
||||
import { err, trap } from 'lib/trap'
|
||||
import { useCommandsContext } from 'hooks/useCommandsContext'
|
||||
|
||||
type MachineContext<T extends AnyStateMachine> = {
|
||||
state: StateFrom<T>
|
||||
@ -124,7 +126,6 @@ export const ModelingMachineProvider = ({
|
||||
token
|
||||
)
|
||||
useHotkeyWrapper(['meta + shift + .'], () => {
|
||||
console.warn('CoreDump: Initializing core dump')
|
||||
toast.promise(
|
||||
coreDump(coreDumpManager, true),
|
||||
{
|
||||
@ -141,6 +142,7 @@ export const ModelingMachineProvider = ({
|
||||
}
|
||||
)
|
||||
})
|
||||
const { commandBarState } = useCommandsContext()
|
||||
|
||||
// Settings machine setup
|
||||
// const retrievedSettings = useRef(
|
||||
@ -326,6 +328,11 @@ export const ModelingMachineProvider = ({
|
||||
)
|
||||
updateSceneObjectColors()
|
||||
|
||||
// side effect to stop code mirror from updating the same selections again
|
||||
editorManager.lastSelection = selections.codeBasedSelections
|
||||
.map(({ range }) => `${range[1]}->${range[1]}`)
|
||||
.join('&')
|
||||
|
||||
return {
|
||||
selectionRanges: selections,
|
||||
}
|
||||
@ -460,6 +467,11 @@ export const ModelingMachineProvider = ({
|
||||
|
||||
return canExtrudeSelection(selectionRanges)
|
||||
},
|
||||
'has valid selection for deletion': ({ selectionRanges }) => {
|
||||
if (!commandBarState.matches('Closed')) return false
|
||||
if (selectionRanges.codeBasedSelections.length <= 0) return false
|
||||
return true
|
||||
},
|
||||
'Sketch is empty': ({ sketchDetails }) => {
|
||||
const node = getNodeFromPath<VariableDeclaration>(
|
||||
kclManager.ast,
|
||||
@ -501,7 +513,7 @@ export const ModelingMachineProvider = ({
|
||||
services: {
|
||||
'AST-undo-startSketchOn': async ({ sketchDetails }) => {
|
||||
if (!sketchDetails) return
|
||||
const newAst: Program = JSON.parse(JSON.stringify(kclManager.ast))
|
||||
const newAst: Program = kclManager.ast
|
||||
const varDecIndex = sketchDetails.sketchPathToNode[1][0]
|
||||
// remove body item at varDecIndex
|
||||
newAst.body = newAst.body.filter((_, i) => i !== varDecIndex)
|
||||
@ -923,6 +935,11 @@ export const ModelingMachineProvider = ({
|
||||
}
|
||||
}, [modelingSend])
|
||||
|
||||
// Allow using the delete key to delete solids
|
||||
useHotkeys(['backspace', 'delete', 'del'], () => {
|
||||
modelingSend({ type: 'Delete selection' })
|
||||
})
|
||||
|
||||
useStateMachineCommands({
|
||||
machineId: 'modeling',
|
||||
state: modelingState,
|
||||
|
@ -1,7 +1,25 @@
|
||||
import { coreDump } from 'lang/wasm'
|
||||
import { CoreDumpManager } from 'lib/coredump'
|
||||
import { CustomIcon } from './CustomIcon'
|
||||
import { engineCommandManager } from 'lib/singletons'
|
||||
import React from 'react'
|
||||
import toast from 'react-hot-toast'
|
||||
import Tooltip from './Tooltip'
|
||||
import { useStore } from 'useStore'
|
||||
import { useSettingsAuthContext } from 'hooks/useSettingsAuthContext'
|
||||
|
||||
export const RefreshButton = ({ children }: React.PropsWithChildren) => {
|
||||
const { auth } = useSettingsAuthContext()
|
||||
const token = auth?.context?.token
|
||||
const { htmlRef } = useStore((s) => ({
|
||||
htmlRef: s.htmlRef,
|
||||
}))
|
||||
const coreDumpManager = new CoreDumpManager(
|
||||
engineCommandManager,
|
||||
htmlRef,
|
||||
token
|
||||
)
|
||||
|
||||
export function RefreshButton() {
|
||||
async function refresh() {
|
||||
if (window && 'plausible' in window) {
|
||||
const p = window.plausible as (
|
||||
@ -17,8 +35,26 @@ export function RefreshButton() {
|
||||
})
|
||||
}
|
||||
|
||||
// Window may not be available in some environments
|
||||
window?.location.reload()
|
||||
toast
|
||||
.promise(
|
||||
coreDump(coreDumpManager, true),
|
||||
{
|
||||
loading: 'Starting core dump...',
|
||||
success: 'Core dump completed successfully',
|
||||
error: 'Error while exporting core dump',
|
||||
},
|
||||
{
|
||||
success: {
|
||||
// Note: this extended duration is especially important for Playwright e2e testing
|
||||
// default duration is 2000 - https://react-hot-toast.com/docs/toast#default-durations
|
||||
duration: 6000,
|
||||
},
|
||||
}
|
||||
)
|
||||
.then(() => {
|
||||
// Window may not be available in some environments
|
||||
window?.location.reload()
|
||||
})
|
||||
}
|
||||
|
||||
return (
|
||||
|
@ -83,6 +83,7 @@ export const Stream = ({ className = '' }: { className?: string }) => {
|
||||
if (!videoRef.current) return
|
||||
if (state.matches('Sketch')) return
|
||||
if (state.matches('Sketch no face')) return
|
||||
|
||||
const { x, y } = getNormalisedCoordinates({
|
||||
clientX: e.clientX,
|
||||
clientY: e.clientY,
|
||||
|
@ -145,7 +145,7 @@ export async function applyConstraintIntersect({
|
||||
const { transforms, forcedSelectionRanges } = info
|
||||
|
||||
const transform1 = transformSecondarySketchLinesTagFirst({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges: forcedSelectionRanges,
|
||||
transformInfos: transforms,
|
||||
programMemory: kclManager.programMemory,
|
||||
|
@ -106,7 +106,7 @@ export async function applyConstraintAbsDistance({
|
||||
const transformInfos = info.transforms
|
||||
|
||||
const transform1 = transformAstSketchLines({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges: selectionRanges,
|
||||
transformInfos,
|
||||
programMemory: kclManager.programMemory,
|
||||
@ -128,7 +128,7 @@ export async function applyConstraintAbsDistance({
|
||||
)
|
||||
|
||||
const transform2 = transformAstSketchLines({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges: selectionRanges,
|
||||
transformInfos,
|
||||
programMemory: kclManager.programMemory,
|
||||
@ -176,7 +176,7 @@ export function applyConstraintAxisAlign({
|
||||
let finalValue = createIdentifier('ZERO')
|
||||
|
||||
return transformAstSketchLines({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges: selectionRanges,
|
||||
transformInfos,
|
||||
programMemory: kclManager.programMemory,
|
||||
|
@ -100,7 +100,7 @@ export async function applyConstraintAngleBetween({
|
||||
const transformInfos = info.transforms
|
||||
|
||||
const transformed1 = transformSecondarySketchLinesTagFirst({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges,
|
||||
transformInfos,
|
||||
programMemory: kclManager.programMemory,
|
||||
|
@ -108,7 +108,7 @@ export async function applyConstraintHorzVertDistance({
|
||||
if (err(info)) return Promise.reject(info)
|
||||
const transformInfos = info.transforms
|
||||
const transformed = transformSecondarySketchLinesTagFirst({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges,
|
||||
transformInfos,
|
||||
programMemory: kclManager.programMemory,
|
||||
|
@ -84,7 +84,7 @@ export async function applyConstraintAngleLength({
|
||||
|
||||
const { transforms } = angleLength
|
||||
const sketched = transformAstSketchLines({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges,
|
||||
transformInfos: transforms,
|
||||
programMemory: kclManager.programMemory,
|
||||
@ -139,7 +139,7 @@ export async function applyConstraintAngleLength({
|
||||
}
|
||||
|
||||
const retval = transformAstSketchLines({
|
||||
ast: JSON.parse(JSON.stringify(kclManager.ast)),
|
||||
ast: kclManager.ast,
|
||||
selectionRanges,
|
||||
transformInfos: transforms,
|
||||
programMemory: kclManager.programMemory,
|
||||
|
@ -23,7 +23,7 @@ export default class EditorManager {
|
||||
}
|
||||
|
||||
private _lastSelectionEvent: number | null = null
|
||||
private _lastSelection: string = ''
|
||||
lastSelection: string = ''
|
||||
private _lastEvent: { event: string; time: number } | null = null
|
||||
|
||||
private _modelingSend: (eventInfo: ModelingMachineEvent) => void = () => {}
|
||||
@ -199,12 +199,14 @@ export default class EditorManager {
|
||||
viewUpdate?.state?.selection?.ranges || []
|
||||
)
|
||||
|
||||
if (selString === this._lastSelection) {
|
||||
if (selString === this.lastSelection) {
|
||||
// onUpdate is noisy and is fired a lot by extensions
|
||||
// since we're only interested in selections changes we can ignore most of these.
|
||||
return
|
||||
}
|
||||
this._lastSelection = selString
|
||||
// note this is also set from the "Set selection" action to stop code mirror from updating selections right after
|
||||
// selections are made from the scene
|
||||
this.lastSelection = selString
|
||||
|
||||
if (
|
||||
this._lastSelectionEvent &&
|
||||
|
@ -1,54 +1,197 @@
|
||||
import { LspContext, LspWorkerEventType } from './types'
|
||||
import * as jsrpc from 'json-rpc-2.0'
|
||||
import * as LSP from 'vscode-languageserver-protocol'
|
||||
|
||||
import {
|
||||
LanguageClient,
|
||||
LanguageClientOptions,
|
||||
} from 'vscode-languageclient/browser'
|
||||
import Worker from 'editor/plugins/lsp/worker.ts?worker'
|
||||
registerServerCapability,
|
||||
unregisterServerCapability,
|
||||
} from './server-capability-registration'
|
||||
import { Codec, FromServer, IntoServer } from './codec'
|
||||
import { err } from 'lib/trap'
|
||||
|
||||
export default class LspServerClient {
|
||||
context: LspContext
|
||||
client: LanguageClient | null = null
|
||||
worker: Worker | null = null
|
||||
const client_capabilities: LSP.ClientCapabilities = {
|
||||
textDocument: {
|
||||
hover: {
|
||||
dynamicRegistration: true,
|
||||
contentFormat: ['plaintext', 'markdown'],
|
||||
},
|
||||
moniker: {},
|
||||
synchronization: {
|
||||
dynamicRegistration: true,
|
||||
willSave: false,
|
||||
didSave: false,
|
||||
willSaveWaitUntil: false,
|
||||
},
|
||||
completion: {
|
||||
dynamicRegistration: true,
|
||||
completionItem: {
|
||||
snippetSupport: false,
|
||||
commitCharactersSupport: true,
|
||||
documentationFormat: ['plaintext', 'markdown'],
|
||||
deprecatedSupport: false,
|
||||
preselectSupport: false,
|
||||
},
|
||||
contextSupport: false,
|
||||
},
|
||||
signatureHelp: {
|
||||
dynamicRegistration: true,
|
||||
signatureInformation: {
|
||||
documentationFormat: ['plaintext', 'markdown'],
|
||||
},
|
||||
},
|
||||
declaration: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
definition: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
typeDefinition: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
implementation: {
|
||||
dynamicRegistration: true,
|
||||
linkSupport: true,
|
||||
},
|
||||
},
|
||||
workspace: {
|
||||
didChangeConfiguration: {
|
||||
dynamicRegistration: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
constructor(context: LspContext) {
|
||||
this.context = context
|
||||
export default class Client extends jsrpc.JSONRPCServerAndClient {
|
||||
afterInitializedHooks: (() => Promise<void>)[] = []
|
||||
#fromServer: FromServer
|
||||
private serverCapabilities: LSP.ServerCapabilities<any> = {}
|
||||
private notifyFn: ((message: LSP.NotificationMessage) => void) | null = null
|
||||
|
||||
constructor(fromServer: FromServer, intoServer: IntoServer) {
|
||||
super(
|
||||
new jsrpc.JSONRPCServer(),
|
||||
new jsrpc.JSONRPCClient(async (json: jsrpc.JSONRPCRequest) => {
|
||||
const encoded = Codec.encode(json)
|
||||
intoServer.enqueue(encoded)
|
||||
if (null != json.id) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
const response = await fromServer.responses.get(json.id)
|
||||
this.client.receive(response as jsrpc.JSONRPCResponse)
|
||||
}
|
||||
})
|
||||
)
|
||||
this.#fromServer = fromServer
|
||||
}
|
||||
|
||||
async startServer() {
|
||||
this.worker = new Worker({ name: this.context.worker })
|
||||
this.worker.postMessage({
|
||||
worker: this.context.worker,
|
||||
eventType: LspWorkerEventType.Init,
|
||||
eventData: this.context.options,
|
||||
})
|
||||
}
|
||||
|
||||
async startClient() {
|
||||
const clientOptions: LanguageClientOptions = {
|
||||
documentSelector: [{ language: 'kcl' }],
|
||||
diagnosticCollectionName: 'markers',
|
||||
}
|
||||
|
||||
if (!this.worker) {
|
||||
console.error('Worker not initialized')
|
||||
async start(): Promise<void> {
|
||||
// process "window/logMessage": client <- server
|
||||
this.addMethod(LSP.LogMessageNotification.type.method, (params) => {
|
||||
const { type, message } = params as {
|
||||
type: LSP.MessageType
|
||||
message: string
|
||||
}
|
||||
let messageString = ''
|
||||
switch (type) {
|
||||
case LSP.MessageType.Error: {
|
||||
messageString += '[error] '
|
||||
break
|
||||
}
|
||||
case LSP.MessageType.Warning: {
|
||||
messageString += ' [warn] '
|
||||
break
|
||||
}
|
||||
case LSP.MessageType.Info: {
|
||||
messageString += ' [info] '
|
||||
break
|
||||
}
|
||||
case LSP.MessageType.Log: {
|
||||
messageString += ' [log] '
|
||||
break
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
messageString += message
|
||||
return
|
||||
}
|
||||
})
|
||||
|
||||
this.client = new LanguageClient(
|
||||
this.context.worker + 'LspClient',
|
||||
this.context.worker + ' LSP Client',
|
||||
clientOptions,
|
||||
this.worker
|
||||
// process "client/registerCapability": client <- server
|
||||
this.addMethod(LSP.RegistrationRequest.type.method, (params) => {
|
||||
// Register a server capability.
|
||||
params.registrations.forEach(
|
||||
(capabilityRegistration: LSP.Registration) => {
|
||||
const caps = registerServerCapability(
|
||||
this.serverCapabilities,
|
||||
capabilityRegistration
|
||||
)
|
||||
if (err(caps)) return (this.serverCapabilities = {})
|
||||
this.serverCapabilities = caps
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
// process "client/unregisterCapability": client <- server
|
||||
this.addMethod(LSP.UnregistrationRequest.type.method, (params) => {
|
||||
// Unregister a server capability.
|
||||
params.unregisterations.forEach(
|
||||
(capabilityUnregistration: LSP.Unregistration) => {
|
||||
const caps = unregisterServerCapability(
|
||||
this.serverCapabilities,
|
||||
capabilityUnregistration
|
||||
)
|
||||
if (err(caps)) return (this.serverCapabilities = {})
|
||||
this.serverCapabilities = caps
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
// request "initialize": client <-> server
|
||||
const { capabilities } = await this.request(
|
||||
LSP.InitializeRequest.type.method,
|
||||
{
|
||||
processId: null,
|
||||
clientInfo: {
|
||||
name: 'kcl-language-client',
|
||||
},
|
||||
capabilities: client_capabilities,
|
||||
rootUri: null,
|
||||
} as LSP.InitializeParams
|
||||
)
|
||||
|
||||
try {
|
||||
await this.client.start()
|
||||
} catch (error) {
|
||||
this.client.error(`Start failed`, error, 'force')
|
||||
this.serverCapabilities = capabilities
|
||||
|
||||
// notify "initialized": client --> server
|
||||
this.notify(LSP.InitializedNotification.type.method, {})
|
||||
|
||||
await Promise.all(
|
||||
this.afterInitializedHooks.map((f: () => Promise<void>) => f())
|
||||
)
|
||||
await Promise.all([this.processNotifications(), this.processRequests()])
|
||||
}
|
||||
|
||||
getServerCapabilities(): LSP.ServerCapabilities<any> {
|
||||
return this.serverCapabilities
|
||||
}
|
||||
|
||||
setNotifyFn(fn: (message: LSP.NotificationMessage) => void): void {
|
||||
this.notifyFn = fn
|
||||
}
|
||||
|
||||
async processNotifications(): Promise<void> {
|
||||
for await (const notification of this.#fromServer.notifications) {
|
||||
if (this.notifyFn) {
|
||||
this.notifyFn(notification)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deactivate() {
|
||||
return this.client?.stop()
|
||||
async processRequests(): Promise<void> {
|
||||
for await (const request of this.#fromServer.requests) {
|
||||
await this.receiveAndSend(request)
|
||||
}
|
||||
}
|
||||
|
||||
pushAfterInitializeHook(...hooks: (() => Promise<void>)[]): void {
|
||||
this.afterInitializedHooks.push(...hooks)
|
||||
}
|
||||
}
|
||||
|
79
src/editor/plugins/lsp/codec.ts
Normal file
79
src/editor/plugins/lsp/codec.ts
Normal file
@ -0,0 +1,79 @@
|
||||
import * as jsrpc from 'json-rpc-2.0'
|
||||
import * as vsrpc from 'vscode-jsonrpc'
|
||||
|
||||
import Bytes from './codec/bytes'
|
||||
import StreamDemuxer from './codec/demuxer'
|
||||
import Headers from './codec/headers'
|
||||
import Queue from './codec/queue'
|
||||
import Tracer from './tracer'
|
||||
import { LspWorkerEventType, LspWorker } from './types'
|
||||
|
||||
export const encoder = new TextEncoder()
|
||||
export const decoder = new TextDecoder()
|
||||
|
||||
export class Codec {
|
||||
static encode(
|
||||
json: jsrpc.JSONRPCRequest | jsrpc.JSONRPCResponse
|
||||
): Uint8Array {
|
||||
const message = JSON.stringify(json)
|
||||
const delimited = Headers.add(message)
|
||||
return Bytes.encode(delimited)
|
||||
}
|
||||
|
||||
static decode<T>(data: Uint8Array): T {
|
||||
const delimited = Bytes.decode(data)
|
||||
const message = Headers.remove(delimited)
|
||||
return JSON.parse(message) as T
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: tracing efficiency
|
||||
export class IntoServer
|
||||
extends Queue<Uint8Array>
|
||||
implements AsyncGenerator<Uint8Array, never, void>
|
||||
{
|
||||
private worker: Worker | null = null
|
||||
private type_: LspWorker | null = null
|
||||
constructor(type_?: LspWorker, worker?: Worker) {
|
||||
super()
|
||||
if (worker && type_) {
|
||||
this.worker = worker
|
||||
this.type_ = type_
|
||||
}
|
||||
}
|
||||
enqueue(item: Uint8Array): void {
|
||||
Tracer.client(Headers.remove(decoder.decode(item)))
|
||||
if (this.worker) {
|
||||
this.worker.postMessage({
|
||||
worker: this.type_,
|
||||
eventType: LspWorkerEventType.Call,
|
||||
eventData: item,
|
||||
})
|
||||
} else {
|
||||
super.enqueue(item)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface FromServer extends WritableStream<Uint8Array> {
|
||||
readonly responses: {
|
||||
get(key: number | string): null | Promise<vsrpc.ResponseMessage>
|
||||
}
|
||||
readonly notifications: AsyncGenerator<vsrpc.NotificationMessage, never, void>
|
||||
readonly requests: AsyncGenerator<vsrpc.RequestMessage, never, void>
|
||||
|
||||
add(item: Uint8Array): void
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace FromServer {
|
||||
export function create(): FromServer | Error {
|
||||
// Calls private method .start() which can throw.
|
||||
// This is an odd one of the bunch but try/catch seems most suitable here.
|
||||
try {
|
||||
return new StreamDemuxer()
|
||||
} catch (e: any) {
|
||||
return e
|
||||
}
|
||||
}
|
||||
}
|
27
src/editor/plugins/lsp/codec/bytes.ts
Normal file
27
src/editor/plugins/lsp/codec/bytes.ts
Normal file
@ -0,0 +1,27 @@
|
||||
import { encoder, decoder } from '../codec'
|
||||
|
||||
export default class Bytes {
|
||||
static encode(input: string): Uint8Array {
|
||||
return encoder.encode(input)
|
||||
}
|
||||
|
||||
static decode(input: Uint8Array): string {
|
||||
return decoder.decode(input)
|
||||
}
|
||||
|
||||
static append<
|
||||
T extends { length: number; set(arr: T, offset: number): void }
|
||||
>(constructor: { new (length: number): T }, ...arrays: T[]) {
|
||||
let totalLength = 0
|
||||
for (const arr of arrays) {
|
||||
totalLength += arr.length
|
||||
}
|
||||
const result = new constructor(totalLength)
|
||||
let offset = 0
|
||||
for (const arr of arrays) {
|
||||
result.set(arr, offset)
|
||||
offset += arr.length
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
101
src/editor/plugins/lsp/codec/demuxer.ts
Normal file
101
src/editor/plugins/lsp/codec/demuxer.ts
Normal file
@ -0,0 +1,101 @@
|
||||
import * as vsrpc from 'vscode-jsonrpc'
|
||||
|
||||
import Bytes from './bytes'
|
||||
import PromiseMap from './map'
|
||||
import Queue from './queue'
|
||||
import Tracer from '../tracer'
|
||||
import { Codec } from '../codec'
|
||||
|
||||
export default class StreamDemuxer extends Queue<Uint8Array> {
|
||||
readonly responses: PromiseMap<number | string, vsrpc.ResponseMessage> =
|
||||
new PromiseMap()
|
||||
readonly notifications: Queue<vsrpc.NotificationMessage> =
|
||||
new Queue<vsrpc.NotificationMessage>()
|
||||
readonly requests: Queue<vsrpc.RequestMessage> =
|
||||
new Queue<vsrpc.RequestMessage>()
|
||||
|
||||
readonly #start: Promise<void>
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.#start = this.start()
|
||||
}
|
||||
|
||||
private async start(): Promise<void> {
|
||||
let contentLength: null | number = null
|
||||
let buffer = new Uint8Array()
|
||||
|
||||
for await (const bytes of this) {
|
||||
buffer = Bytes.append(Uint8Array, buffer, bytes)
|
||||
while (buffer.length > 0) {
|
||||
// check if the content length is known
|
||||
if (null == contentLength) {
|
||||
// if not, try to match the prefixed headers
|
||||
const match = Bytes.decode(buffer).match(
|
||||
/^Content-Length:\s*(\d+)\s*/
|
||||
)
|
||||
if (null == match) continue
|
||||
|
||||
// try to parse the content-length from the headers
|
||||
const length = parseInt(match[1])
|
||||
|
||||
if (isNaN(length))
|
||||
return Promise.reject(new Error('invalid content length'))
|
||||
|
||||
// slice the headers since we now have the content length
|
||||
buffer = buffer.slice(match[0].length)
|
||||
|
||||
// set the content length
|
||||
contentLength = length
|
||||
}
|
||||
|
||||
// if the buffer doesn't contain a full message; await another iteration
|
||||
if (buffer.length < contentLength) continue
|
||||
|
||||
// Get just the slice of the buffer that is our content length.
|
||||
const slice = buffer.slice(0, contentLength)
|
||||
|
||||
// decode buffer to a string
|
||||
const delimited = Bytes.decode(slice)
|
||||
|
||||
// reset the buffer
|
||||
buffer = buffer.slice(contentLength)
|
||||
// reset the contentLength
|
||||
contentLength = null
|
||||
|
||||
const message = JSON.parse(delimited) as vsrpc.Message
|
||||
Tracer.server(message)
|
||||
|
||||
// demux the message stream
|
||||
if (vsrpc.Message.isResponse(message) && null != message.id) {
|
||||
this.responses.set(message.id, message)
|
||||
continue
|
||||
}
|
||||
if (vsrpc.Message.isNotification(message)) {
|
||||
this.notifications.enqueue(message)
|
||||
continue
|
||||
}
|
||||
if (vsrpc.Message.isRequest(message)) {
|
||||
this.requests.enqueue(message)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
add(bytes: Uint8Array): void {
|
||||
const message = Codec.decode(bytes) as vsrpc.Message
|
||||
Tracer.server(message)
|
||||
|
||||
// demux the message stream
|
||||
if (vsrpc.Message.isResponse(message) && null != message.id) {
|
||||
this.responses.set(message.id, message)
|
||||
}
|
||||
if (vsrpc.Message.isNotification(message)) {
|
||||
this.notifications.enqueue(message)
|
||||
}
|
||||
if (vsrpc.Message.isRequest(message)) {
|
||||
this.requests.enqueue(message)
|
||||
}
|
||||
}
|
||||
}
|
9
src/editor/plugins/lsp/codec/headers.ts
Normal file
9
src/editor/plugins/lsp/codec/headers.ts
Normal file
@ -0,0 +1,9 @@
|
||||
export default class Headers {
|
||||
static add(message: string): string {
|
||||
return `Content-Length: ${message.length}\r\n\r\n${message}`
|
||||
}
|
||||
|
||||
static remove(delimited: string): string {
|
||||
return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
|
||||
}
|
||||
}
|
72
src/editor/plugins/lsp/codec/map.ts
Normal file
72
src/editor/plugins/lsp/codec/map.ts
Normal file
@ -0,0 +1,72 @@
|
||||
export default class PromiseMap<K, V extends { toString(): string }> {
|
||||
#map: Map<K, PromiseMap.Entry<V>> = new Map()
|
||||
|
||||
get(key: K & { toString(): string }): null | Promise<V> {
|
||||
let initialized: PromiseMap.Entry<V>
|
||||
// if the entry doesn't exist, set it
|
||||
if (!this.#map.has(key)) {
|
||||
initialized = this.#set(key)
|
||||
} else {
|
||||
// otherwise return the entry
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
initialized = this.#map.get(key)!
|
||||
}
|
||||
// if the entry is a pending promise, return it
|
||||
if (initialized.status === 'pending') {
|
||||
return initialized.promise
|
||||
} else {
|
||||
// otherwise return null
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
#set(key: K, value?: V): PromiseMap.Entry<V> {
|
||||
if (this.#map.has(key)) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
return this.#map.get(key)!
|
||||
}
|
||||
// placeholder resolver for entry
|
||||
let resolve = (item: V) => {
|
||||
void item
|
||||
}
|
||||
// promise for entry (which assigns the resolver
|
||||
const promise = new Promise<V>((resolver) => {
|
||||
resolve = resolver
|
||||
})
|
||||
// the initialized entry
|
||||
const initialized: PromiseMap.Entry<V> = {
|
||||
status: 'pending',
|
||||
resolve,
|
||||
promise,
|
||||
}
|
||||
if (null != value) {
|
||||
initialized.resolve(value)
|
||||
}
|
||||
// set the entry
|
||||
this.#map.set(key, initialized)
|
||||
return initialized
|
||||
}
|
||||
|
||||
set(key: K & { toString(): string }, value: V): this {
|
||||
const initialized = this.#set(key, value)
|
||||
// if the promise is pending ...
|
||||
if (initialized.status === 'pending') {
|
||||
// ... set the entry status to resolved to free the promise
|
||||
this.#map.set(key, { status: 'resolved' })
|
||||
// ... and resolve the promise with the given value
|
||||
initialized.resolve(value)
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
get size(): number {
|
||||
return this.#map.size
|
||||
}
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace PromiseMap {
|
||||
export type Entry<V> =
|
||||
| { status: 'pending'; resolve: (item: V) => void; promise: Promise<V> }
|
||||
| { status: 'resolved' }
|
||||
}
|
@ -33,19 +33,15 @@ export default class Queue<T>
|
||||
}
|
||||
}
|
||||
|
||||
constructor(stream?: WritableStream<T>) {
|
||||
constructor() {
|
||||
const closed = this.#closed
|
||||
const promises = this.#promises
|
||||
const resolvers = this.#resolvers
|
||||
if (stream) {
|
||||
this.#stream = stream
|
||||
} else {
|
||||
this.#stream = new WritableStream({
|
||||
write(item: T): void {
|
||||
Queue.#__enqueue(closed, promises, resolvers, item)
|
||||
},
|
||||
})
|
||||
}
|
||||
this.#stream = new WritableStream({
|
||||
write(item: T): void {
|
||||
Queue.#__enqueue(closed, promises, resolvers, item)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
#add(): void {
|
@ -1,5 +1,6 @@
|
||||
import type * as LSP from 'vscode-languageserver-protocol'
|
||||
import LspServerClient from './client'
|
||||
import Client from './client'
|
||||
import { SemanticToken, deserializeTokens } from './kcl/semantic_tokens'
|
||||
import { LanguageServerPlugin } from 'editor/plugins/lsp/plugin'
|
||||
import { CopilotLspCompletionParams } from 'wasm-lib/kcl/bindings/CopilotLspCompletionParams'
|
||||
import { CopilotCompletionResponse } from 'wasm-lib/kcl/bindings/CopilotCompletionResponse'
|
||||
@ -9,7 +10,7 @@ import { UpdateUnitsParams } from 'wasm-lib/kcl/bindings/UpdateUnitsParams'
|
||||
import { UpdateCanExecuteParams } from 'wasm-lib/kcl/bindings/UpdateCanExecuteParams'
|
||||
import { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'
|
||||
import { UpdateCanExecuteResponse } from 'wasm-lib/kcl/bindings/UpdateCanExecuteResponse'
|
||||
import { LspWorker } from 'editor/plugins/lsp/types'
|
||||
import { LspWorker } from './types'
|
||||
|
||||
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/
|
||||
|
||||
@ -53,7 +54,7 @@ interface LSPNotifyMap {
|
||||
}
|
||||
|
||||
export interface LanguageServerClientOptions {
|
||||
client: LspServerClient
|
||||
client: Client
|
||||
name: LspWorker
|
||||
}
|
||||
|
||||
@ -66,7 +67,7 @@ export interface LanguageServerOptions {
|
||||
}
|
||||
|
||||
export class LanguageServerClient {
|
||||
private client: LspServerClient
|
||||
private client: Client
|
||||
readonly name: string
|
||||
|
||||
public ready: boolean
|
||||
@ -76,8 +77,7 @@ export class LanguageServerClient {
|
||||
public initializePromise: Promise<void>
|
||||
|
||||
private isUpdatingSemanticTokens: boolean = false
|
||||
// tODO: Fix this type
|
||||
private semanticTokens: any = {}
|
||||
private semanticTokens: SemanticToken[] = []
|
||||
private queuedUids: string[] = []
|
||||
|
||||
constructor(options: LanguageServerClientOptions) {
|
||||
@ -93,7 +93,8 @@ export class LanguageServerClient {
|
||||
|
||||
async initialize() {
|
||||
// Start the client in the background.
|
||||
this.client.startClient()
|
||||
this.client.setNotifyFn(this.processNotifications.bind(this))
|
||||
this.client.start()
|
||||
|
||||
this.ready = true
|
||||
}
|
||||
@ -102,6 +103,10 @@ export class LanguageServerClient {
|
||||
return this.name
|
||||
}
|
||||
|
||||
getServerCapabilities(): LSP.ServerCapabilities<any> {
|
||||
return this.client.getServerCapabilities()
|
||||
}
|
||||
|
||||
close() {}
|
||||
|
||||
textDocumentDidOpen(params: LSP.DidOpenTextDocumentParams) {
|
||||
@ -112,10 +117,13 @@ export class LanguageServerClient {
|
||||
plugin.documentUri = params.textDocument.uri
|
||||
plugin.languageId = params.textDocument.languageId
|
||||
}
|
||||
|
||||
this.updateSemanticTokens(params.textDocument.uri)
|
||||
}
|
||||
|
||||
textDocumentDidChange(params: LSP.DidChangeTextDocumentParams) {
|
||||
this.notify('textDocument/didChange', params)
|
||||
this.updateSemanticTokens(params.textDocument.uri)
|
||||
}
|
||||
|
||||
textDocumentDidClose(params: LSP.DidCloseTextDocumentParams) {
|
||||
@ -152,19 +160,64 @@ export class LanguageServerClient {
|
||||
this.notify('workspace/didDeleteFiles', params)
|
||||
}
|
||||
|
||||
async updateSemanticTokens(uri: string) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.semanticTokensProvider) {
|
||||
return
|
||||
}
|
||||
|
||||
// Make sure we can only run, if we aren't already running.
|
||||
if (!this.isUpdatingSemanticTokens) {
|
||||
this.isUpdatingSemanticTokens = true
|
||||
|
||||
const result = await this.request('textDocument/semanticTokens/full', {
|
||||
textDocument: {
|
||||
uri,
|
||||
},
|
||||
})
|
||||
|
||||
this.semanticTokens = await deserializeTokens(
|
||||
result.data,
|
||||
this.getServerCapabilities().semanticTokensProvider
|
||||
)
|
||||
|
||||
this.isUpdatingSemanticTokens = false
|
||||
}
|
||||
}
|
||||
|
||||
getSemanticTokens(): SemanticToken[] {
|
||||
return this.semanticTokens
|
||||
}
|
||||
|
||||
async textDocumentHover(params: LSP.HoverParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.hoverProvider) {
|
||||
return
|
||||
}
|
||||
return await this.request('textDocument/hover', params)
|
||||
}
|
||||
|
||||
async textDocumentFormatting(params: LSP.DocumentFormattingParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.documentFormattingProvider) {
|
||||
return
|
||||
}
|
||||
return await this.request('textDocument/formatting', params)
|
||||
}
|
||||
|
||||
async textDocumentFoldingRange(params: LSP.FoldingRangeParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.foldingRangeProvider) {
|
||||
return
|
||||
}
|
||||
return await this.request('textDocument/foldingRange', params)
|
||||
}
|
||||
|
||||
async textDocumentCompletion(params: LSP.CompletionParams) {
|
||||
const serverCapabilities = this.getServerCapabilities()
|
||||
if (!serverCapabilities.completionProvider) {
|
||||
return
|
||||
}
|
||||
const response = await this.request('textDocument/completion', params)
|
||||
return response
|
||||
}
|
||||
@ -183,19 +236,14 @@ export class LanguageServerClient {
|
||||
method: K,
|
||||
params: LSPRequestMap[K][0]
|
||||
): Promise<LSPRequestMap[K][1]> {
|
||||
return this.client.client?.sendRequest(method, params) as Promise<
|
||||
LSPRequestMap[K][1]
|
||||
>
|
||||
return this.client.request(method, params) as Promise<LSPRequestMap[K][1]>
|
||||
}
|
||||
|
||||
private notify<K extends keyof LSPNotifyMap>(
|
||||
method: K,
|
||||
params: LSPNotifyMap[K]
|
||||
): Promise<void> {
|
||||
if (!this.client.client) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
return this.client.client.sendNotification(method, params)
|
||||
): void {
|
||||
return this.client.notify(method, params)
|
||||
}
|
||||
|
||||
async getCompletion(params: CopilotLspCompletionParams) {
|
||||
@ -205,33 +253,6 @@ export class LanguageServerClient {
|
||||
return response
|
||||
}
|
||||
|
||||
getServerCapabilities(): LSP.ServerCapabilities<any> | null {
|
||||
if (!this.client.client) {
|
||||
return null
|
||||
}
|
||||
|
||||
// TODO: Fix this type
|
||||
return null
|
||||
}
|
||||
|
||||
async updateSemanticTokens(uri: string) {
|
||||
// Make sure we can only run, if we aren't already running.
|
||||
if (!this.isUpdatingSemanticTokens) {
|
||||
this.isUpdatingSemanticTokens = true
|
||||
|
||||
this.semanticTokens = await this.request(
|
||||
'textDocument/semanticTokens/full',
|
||||
{
|
||||
textDocument: {
|
||||
uri,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
this.isUpdatingSemanticTokens = false
|
||||
}
|
||||
}
|
||||
|
||||
async accept(uuid: string) {
|
||||
const badUids = this.queuedUids.filter((u) => u !== uuid)
|
||||
this.queuedUids = []
|
||||
@ -265,7 +286,6 @@ export class LanguageServerClient {
|
||||
return await this.request('kcl/updateCanExecute', params)
|
||||
}
|
||||
|
||||
// TODO: Fix this type
|
||||
private processNotifications(notification: LSP.NotificationMessage) {
|
||||
for (const plugin of this.plugins) plugin.processNotification(notification)
|
||||
}
|
||||
|
@ -122,13 +122,13 @@ export function kclPlugin(options: LanguageServerOptions): Extension {
|
||||
const line = state.doc.lineAt(pos)
|
||||
let trigKind: CompletionTriggerKind = CompletionTriggerKind.Invoked
|
||||
let trigChar: string | undefined
|
||||
const serverCapabilities = plugin.client.getServerCapabilities()
|
||||
if (
|
||||
serverCapabilities &&
|
||||
!explicit &&
|
||||
serverCapabilities.completionProvider?.triggerCharacters?.includes(
|
||||
line.text[pos - line.from - 1]
|
||||
)
|
||||
plugin.client
|
||||
.getServerCapabilities()
|
||||
.completionProvider?.triggerCharacters?.includes(
|
||||
line.text[pos - line.from - 1]
|
||||
)
|
||||
) {
|
||||
trigKind = CompletionTriggerKind.TriggerCharacter
|
||||
trigChar = line.text[pos - line.from - 1]
|
||||
|
168
src/editor/plugins/lsp/kcl/parser.ts
Normal file
168
src/editor/plugins/lsp/kcl/parser.ts
Normal file
@ -0,0 +1,168 @@
|
||||
// Extends the codemirror Parser for kcl.
|
||||
|
||||
import {
|
||||
Parser,
|
||||
Input,
|
||||
TreeFragment,
|
||||
PartialParse,
|
||||
Tree,
|
||||
NodeType,
|
||||
NodeSet,
|
||||
} from '@lezer/common'
|
||||
import { LanguageServerClient } from 'editor/plugins/lsp'
|
||||
import { posToOffset } from 'editor/plugins/lsp/util'
|
||||
import { SemanticToken } from './semantic_tokens'
|
||||
import { DocInput } from '@codemirror/language'
|
||||
import { tags, styleTags } from '@lezer/highlight'
|
||||
|
||||
/**
 * A Lezer `Parser` for kcl that does not parse text itself: it builds
 * syntax trees from the semantic tokens already produced by the kcl
 * language server (via the `LanguageServerClient`).
 */
export default class KclParser extends Parser {
  // Source of server capabilities and cached semantic tokens.
  private client: LanguageServerClient

  constructor(client: LanguageServerClient) {
    super()
    this.client = client
  }

  /**
   * Lezer entry point: start an (effectively single-step) parse over
   * `input`, delegating all work to a new `Context`.
   */
  createParse(
    input: Input,
    fragments: readonly TreeFragment[],
    ranges: readonly { from: number; to: number }[]
  ): PartialParse {
    let parse: PartialParse = new Context(this, input, fragments, ranges)
    return parse
  }

  /**
   * The server's semantic-token type legend.
   * NOTE(review): the non-null assertion assumes the server advertised
   * `semanticTokensProvider` — confirm callers only parse when it is set.
   */
  getTokenTypes(): string[] {
    return this.client.getServerCapabilities().semanticTokensProvider!.legend
      .tokenTypes
  }

  /** The most recent semantic tokens cached on the client. */
  getSemanticTokens(): SemanticToken[] {
    return this.client.getSemanticTokens()
  }
}
|
||||
|
||||
/**
 * A `PartialParse` that converts the client's semantic tokens into a Lezer
 * `Tree` in one `advance()` step. Each token becomes a node whose single
 * child is the tree built from the following token, so the result is a
 * right-leaning chain of nodes rather than a conventional syntax tree.
 */
class Context implements PartialParse {
  private parser: KclParser
  private input: DocInput
  private fragments: readonly TreeFragment[]
  private ranges: readonly { from: number; to: number }[]

  // Maps semantic token type name -> its NodeType (ids match legend order).
  private nodeTypes: { [key: string]: NodeType }
  stoppedAt: number = 0

  private semanticTokens: SemanticToken[] = []
  // Running absolute position; tokens encode line/column as deltas.
  private currentLine: number = 0
  private currentColumn: number = 0
  private nodeSet: NodeSet

  constructor(
    /// The parser configuration used.
    parser: KclParser,
    input: Input,
    fragments: readonly TreeFragment[],
    ranges: readonly { from: number; to: number }[]
  ) {
    this.parser = parser
    // NOTE(review): assumes the Input handed over by CodeMirror is a
    // DocInput (it exposes `.doc`, used below) — confirm against callers.
    this.input = input as DocInput
    this.fragments = fragments
    this.ranges = ranges

    // Iterate over the semantic token types and create a node type for each.
    this.nodeTypes = {}
    let nodeArray: NodeType[] = []
    this.parser.getTokenTypes().forEach((tokenType, index) => {
      const nodeType = NodeType.define({
        id: index,
        name: tokenType,
        // props: [this.styleTags],
      })
      this.nodeTypes[tokenType] = nodeType
      nodeArray.push(nodeType)
    })

    this.semanticTokens = this.parser.getSemanticTokens()
    // Attach highlighting tags to the node types by name so the standard
    // CodeMirror highlighter can style them.
    const styles = styleTags({
      number: tags.number,
      variable: tags.variableName,
      operator: tags.operator,
      keyword: tags.keyword,
      string: tags.string,
      comment: tags.comment,
      function: tags.function(tags.variableName),
    })
    this.nodeSet = new NodeSet(nodeArray).extend(styles)
  }

  get parsedPos(): number {
    return 0
  }

  /**
   * Build the whole tree in a single step. Marks the parse as stopped at
   * the end of the document so Lezer does not call advance() again.
   */
  advance(): Tree | null {
    if (this.semanticTokens.length === 0) {
      return new Tree(NodeType.none, [], [], 0)
    }
    const tree = this.createTree(this.semanticTokens[0], 0)
    this.stoppedAt = this.input.doc.length
    return tree
  }

  /**
   * Recursively build a tree rooted at `token` (the token at `index`),
   * advancing the running line/column state as a side effect.
   */
  createTree(token: SemanticToken, index: number): Tree {
    const changedLine = token.delta_line !== 0
    this.currentLine += token.delta_line
    if (changedLine) {
      // delta_start is relative to the line start after a line change.
      this.currentColumn = 0
    }
    this.currentColumn += token.delta_start

    // Let's get our position relative to the start of the file.
    let currentPosition = posToOffset(this.input.doc, {
      line: this.currentLine,
      character: this.currentColumn,
    })

    const nodeType = this.nodeSet.types[this.nodeTypes[token.token_type].id]

    if (currentPosition === undefined) {
      // This is bad and weird: the token points outside the document.
      return new Tree(nodeType, [], [], token.length)
    }

    if (index >= this.semanticTokens.length - 1) {
      // We have no children.
      return new Tree(nodeType, [], [], token.length)
    }

    // Peek at the next token to compute the child's offset without
    // mutating the running state (createTree below will do that).
    const nextIndex = index + 1
    const nextToken = this.semanticTokens[nextIndex]
    const changedLineNext = nextToken.delta_line !== 0
    const nextLine = this.currentLine + nextToken.delta_line
    const nextColumn = changedLineNext
      ? nextToken.delta_start
      : this.currentColumn + nextToken.delta_start
    const nextPosition = posToOffset(this.input.doc, {
      line: nextLine,
      character: nextColumn,
    })

    if (nextPosition === undefined) {
      // This is bad and weird: the next token points outside the document.
      return new Tree(nodeType, [], [], token.length)
    }

    // Build this node with the rest of the token stream as its only child.
    return new Tree(
      nodeType,
      [this.createTree(nextToken, nextIndex)],

      // The positions (offsets relative to the start of this tree) of the children.
      [nextPosition - currentPosition],
      token.length
    )
  }

  stopAt(pos: number) {
    this.stoppedAt = pos
  }
}
|
51
src/editor/plugins/lsp/kcl/semantic_tokens.ts
Normal file
51
src/editor/plugins/lsp/kcl/semantic_tokens.ts
Normal file
@ -0,0 +1,51 @@
|
||||
import type * as LSP from 'vscode-languageserver-protocol'
|
||||
|
||||
export class SemanticToken {
|
||||
delta_line: number
|
||||
delta_start: number
|
||||
length: number
|
||||
token_type: string
|
||||
token_modifiers_bitset: string
|
||||
|
||||
constructor(
|
||||
delta_line = 0,
|
||||
delta_start = 0,
|
||||
length = 0,
|
||||
token_type = '',
|
||||
token_modifiers_bitset = ''
|
||||
) {
|
||||
this.delta_line = delta_line
|
||||
this.delta_start = delta_start
|
||||
this.length = length
|
||||
this.token_type = token_type
|
||||
this.token_modifiers_bitset = token_modifiers_bitset
|
||||
}
|
||||
}
|
||||
|
||||
export async function deserializeTokens(
|
||||
data: number[],
|
||||
semanticTokensProvider?: LSP.SemanticTokensOptions
|
||||
): Promise<SemanticToken[]> {
|
||||
if (!semanticTokensProvider) {
|
||||
return []
|
||||
}
|
||||
// Check if data length is divisible by 5
|
||||
if (data.length % 5 !== 0) {
|
||||
return Promise.reject(new Error('Length is not divisible by 5'))
|
||||
}
|
||||
|
||||
const tokens = []
|
||||
for (let i = 0; i < data.length; i += 5) {
|
||||
tokens.push(
|
||||
new SemanticToken(
|
||||
data[i],
|
||||
data[i + 1],
|
||||
data[i + 2],
|
||||
semanticTokensProvider.legend.tokenTypes[data[i + 3]],
|
||||
semanticTokensProvider.legend.tokenModifiers[data[i + 4]]
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
return tokens
|
||||
}
|
@ -145,7 +145,11 @@ export class LanguageServerPlugin implements PluginValue {
|
||||
view: EditorView,
|
||||
{ line, character }: { line: number; character: number }
|
||||
): Promise<Tooltip | null> {
|
||||
if (!this.client.ready) return null
|
||||
if (
|
||||
!this.client.ready ||
|
||||
!this.client.getServerCapabilities().hoverProvider
|
||||
)
|
||||
return null
|
||||
|
||||
this.sendChange({ documentText: view.state.doc.toString() })
|
||||
const result = await this.client.textDocumentHover({
|
||||
@ -171,7 +175,11 @@ export class LanguageServerPlugin implements PluginValue {
|
||||
}
|
||||
|
||||
async getFoldingRanges(): Promise<LSP.FoldingRange[] | null> {
|
||||
if (!this.client.ready) return null
|
||||
if (
|
||||
!this.client.ready ||
|
||||
!this.client.getServerCapabilities().foldingRangeProvider
|
||||
)
|
||||
return null
|
||||
const result = await this.client.textDocumentFoldingRange({
|
||||
textDocument: { uri: this.documentUri },
|
||||
})
|
||||
@ -251,7 +259,11 @@ export class LanguageServerPlugin implements PluginValue {
|
||||
}
|
||||
|
||||
async requestFormatting() {
|
||||
if (!this.client.ready) return null
|
||||
if (
|
||||
!this.client.ready ||
|
||||
!this.client.getServerCapabilities().documentFormattingProvider
|
||||
)
|
||||
return null
|
||||
|
||||
this.client.textDocumentDidChange({
|
||||
textDocument: {
|
||||
@ -297,7 +309,11 @@ export class LanguageServerPlugin implements PluginValue {
|
||||
triggerCharacter: string | undefined
|
||||
}
|
||||
): Promise<CompletionResult | null> {
|
||||
if (!this.client.ready) return null
|
||||
if (
|
||||
!this.client.ready ||
|
||||
!this.client.getServerCapabilities().completionProvider
|
||||
)
|
||||
return null
|
||||
|
||||
this.sendChange({
|
||||
documentText: context.state.doc.toString(),
|
||||
|
77
src/editor/plugins/lsp/server-capability-registration.ts
Normal file
77
src/editor/plugins/lsp/server-capability-registration.ts
Normal file
@ -0,0 +1,77 @@
|
||||
import {
|
||||
Registration,
|
||||
ServerCapabilities,
|
||||
Unregistration,
|
||||
} from 'vscode-languageserver-protocol'
|
||||
|
||||
interface IFlexibleServerCapabilities extends ServerCapabilities {
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface IMethodServerCapabilityProviderDictionary {
|
||||
[key: string]: string
|
||||
}
|
||||
|
||||
const ServerCapabilitiesProviders: IMethodServerCapabilityProviderDictionary = {
|
||||
'textDocument/hover': 'hoverProvider',
|
||||
'textDocument/completion': 'completionProvider',
|
||||
'textDocument/signatureHelp': 'signatureHelpProvider',
|
||||
'textDocument/definition': 'definitionProvider',
|
||||
'textDocument/typeDefinition': 'typeDefinitionProvider',
|
||||
'textDocument/implementation': 'implementationProvider',
|
||||
'textDocument/references': 'referencesProvider',
|
||||
'textDocument/documentHighlight': 'documentHighlightProvider',
|
||||
'textDocument/documentSymbol': 'documentSymbolProvider',
|
||||
'textDocument/workspaceSymbol': 'workspaceSymbolProvider',
|
||||
'textDocument/codeAction': 'codeActionProvider',
|
||||
'textDocument/codeLens': 'codeLensProvider',
|
||||
'textDocument/documentFormatting': 'documentFormattingProvider',
|
||||
'textDocument/documentRangeFormatting': 'documentRangeFormattingProvider',
|
||||
'textDocument/documentOnTypeFormatting': 'documentOnTypeFormattingProvider',
|
||||
'textDocument/rename': 'renameProvider',
|
||||
'textDocument/documentLink': 'documentLinkProvider',
|
||||
'textDocument/color': 'colorProvider',
|
||||
'textDocument/foldingRange': 'foldingRangeProvider',
|
||||
'textDocument/declaration': 'declarationProvider',
|
||||
'textDocument/executeCommand': 'executeCommandProvider',
|
||||
'textDocument/semanticTokens/full': 'semanticTokensProvider',
|
||||
'textDocument/publishDiagnostics': 'diagnosticsProvider',
|
||||
}
|
||||
|
||||
function registerServerCapability(
|
||||
serverCapabilities: ServerCapabilities,
|
||||
registration: Registration
|
||||
): ServerCapabilities | Error {
|
||||
const serverCapabilitiesCopy =
|
||||
serverCapabilities as IFlexibleServerCapabilities
|
||||
const { method, registerOptions } = registration
|
||||
const providerName = ServerCapabilitiesProviders[method]
|
||||
|
||||
if (providerName) {
|
||||
if (!registerOptions) {
|
||||
serverCapabilitiesCopy[providerName] = true
|
||||
} else {
|
||||
serverCapabilitiesCopy[providerName] = Object.assign({}, registerOptions)
|
||||
}
|
||||
} else {
|
||||
return new Error('Could not register server capability.')
|
||||
}
|
||||
|
||||
return serverCapabilitiesCopy
|
||||
}
|
||||
|
||||
function unregisterServerCapability(
|
||||
serverCapabilities: ServerCapabilities,
|
||||
unregistration: Unregistration
|
||||
): ServerCapabilities {
|
||||
const serverCapabilitiesCopy =
|
||||
serverCapabilities as IFlexibleServerCapabilities
|
||||
const { method } = unregistration
|
||||
const providerName = ServerCapabilitiesProviders[method]
|
||||
|
||||
delete serverCapabilitiesCopy[providerName]
|
||||
|
||||
return serverCapabilitiesCopy
|
||||
}
|
||||
|
||||
export { registerServerCapability, unregisterServerCapability }
|
21
src/editor/plugins/lsp/tracer.ts
Normal file
21
src/editor/plugins/lsp/tracer.ts
Normal file
@ -0,0 +1,21 @@
|
||||
import { Message } from 'vscode-languageserver-protocol'
|
||||
|
||||
const env = import.meta.env.MODE
|
||||
|
||||
export default class Tracer {
|
||||
static client(message: string): void {
|
||||
// These are really noisy, so we have a special env var for them.
|
||||
if (env === 'lsp_tracing') {
|
||||
console.log('lsp client message', message)
|
||||
}
|
||||
}
|
||||
|
||||
static server(input: string | Message): void {
|
||||
// These are really noisy, so we have a special env var for them.
|
||||
if (env === 'lsp_tracing') {
|
||||
const message: string =
|
||||
typeof input === 'string' ? input : JSON.stringify(input)
|
||||
console.log('lsp server message', message)
|
||||
}
|
||||
}
|
||||
}
|
@ -4,27 +4,22 @@ export enum LspWorker {
|
||||
Kcl = 'kcl',
|
||||
Copilot = 'copilot',
|
||||
}
|
||||
export interface KclWorkerOptions {
|
||||
wasmUrl: string
|
||||
token: string
|
||||
baseUnit: UnitLength
|
||||
apiBaseUrl: string
|
||||
}
|
||||
|
||||
interface LspWorkerOptions {
|
||||
export interface CopilotWorkerOptions {
|
||||
wasmUrl: string
|
||||
token: string
|
||||
apiBaseUrl: string
|
||||
callback: () => void
|
||||
wasmUrl: string
|
||||
}
|
||||
|
||||
export interface KclWorkerOptions extends LspWorkerOptions {
|
||||
baseUnit: UnitLength
|
||||
}
|
||||
|
||||
export interface CopilotWorkerOptions extends LspWorkerOptions {}
|
||||
|
||||
export interface LspContext {
|
||||
worker: LspWorker
|
||||
options: KclWorkerOptions | CopilotWorkerOptions
|
||||
}
|
||||
|
||||
export enum LspWorkerEventType {
|
||||
Init = 'init',
|
||||
Call = 'call',
|
||||
}
|
||||
|
||||
export interface LspWorkerEvent {
|
||||
|
@ -1,77 +1,23 @@
|
||||
import { Codec, FromServer, IntoServer } from 'editor/plugins/lsp/codec'
|
||||
import { fileSystemManager } from 'lang/std/fileSystemManager'
|
||||
import init, {
|
||||
ServerConfig,
|
||||
copilot_lsp_run,
|
||||
kcl_lsp_run,
|
||||
} from 'wasm-lib/pkg/wasm_lib'
|
||||
import * as jsrpc from 'json-rpc-2.0'
|
||||
import {
|
||||
LspWorkerEventType,
|
||||
LspWorkerEvent,
|
||||
LspWorker,
|
||||
KclWorkerOptions,
|
||||
CopilotWorkerOptions,
|
||||
} from 'editor/plugins/lsp/types'
|
||||
import { EngineCommandManager } from 'lang/std/engineConnection'
|
||||
import { err } from 'lib/trap'
|
||||
import { Message } from 'vscode-languageserver'
|
||||
import { LspWorkerEvent, LspWorkerEventType } from 'editor/plugins/lsp/types'
|
||||
import Queue from 'editor/plugins/lsp/queue'
|
||||
import {
|
||||
BrowserMessageReader,
|
||||
BrowserMessageWriter,
|
||||
} from 'vscode-languageserver-protocol/browser'
|
||||
|
||||
class Headers {
|
||||
static add(message: string): string {
|
||||
return `Content-Length: ${message.length}\r\n\r\n${message}`
|
||||
}
|
||||
|
||||
static remove(delimited: string): string {
|
||||
return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
|
||||
}
|
||||
}
|
||||
|
||||
export const encoder = new TextEncoder()
|
||||
export const decoder = new TextDecoder()
|
||||
|
||||
class Codec {
|
||||
static encode(message: Message): Uint8Array {
|
||||
const rpc = JSON.stringify(message.jsonrpc)
|
||||
const delimited = Headers.add(rpc)
|
||||
return encoder.encode(delimited)
|
||||
}
|
||||
|
||||
static decode<T>(data: Uint8Array): T {
|
||||
const delimited = decoder.decode(data)
|
||||
const message = Headers.remove(delimited)
|
||||
return JSON.parse(message) as T
|
||||
}
|
||||
}
|
||||
|
||||
class IntoServer extends Queue<Uint8Array> {
|
||||
constructor(reader: BrowserMessageReader) {
|
||||
super()
|
||||
reader.listen((message: Message) => {
|
||||
super.enqueue(Codec.encode(message))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
class FromServer extends Queue<Uint8Array> {
|
||||
constructor(writer: BrowserMessageWriter) {
|
||||
super(
|
||||
new WritableStream({
|
||||
write(item: Uint8Array): void {
|
||||
writer.write(Codec.decode(item))
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const browserReader = new BrowserMessageReader(self)
|
||||
const browserWriter = new BrowserMessageWriter(self)
|
||||
|
||||
const intoServer = new IntoServer(browserReader)
|
||||
const fromServer = new FromServer(browserWriter)
|
||||
const intoServer: IntoServer = new IntoServer()
|
||||
const fromServer: FromServer | Error = FromServer.create()
|
||||
|
||||
// Initialise the wasm module.
|
||||
const initialise = async (wasmUrl: string) => {
|
||||
@ -111,7 +57,7 @@ export async function kclLspRun(
|
||||
}
|
||||
|
||||
onmessage = function (event) {
|
||||
if (err(intoServer)) return
|
||||
if (err(fromServer)) return
|
||||
const { worker, eventType, eventData }: LspWorkerEvent = event.data
|
||||
|
||||
switch (eventType) {
|
||||
@ -149,7 +95,35 @@ onmessage = function (event) {
|
||||
console.error('Worker: Error loading wasm module', worker, error)
|
||||
})
|
||||
break
|
||||
case LspWorkerEventType.Call:
|
||||
const data = eventData as Uint8Array
|
||||
intoServer.enqueue(data)
|
||||
const json: jsrpc.JSONRPCRequest = Codec.decode(data)
|
||||
if (null != json.id) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
|
||||
fromServer.responses.get(json.id)!.then((response) => {
|
||||
const encoded = Codec.encode(response as jsrpc.JSONRPCResponse)
|
||||
postMessage(encoded)
|
||||
})
|
||||
}
|
||||
break
|
||||
default:
|
||||
console.error('Worker: Unknown message type', worker, eventType)
|
||||
}
|
||||
}
|
||||
|
||||
new Promise<void>(async (resolve) => {
|
||||
if (err(fromServer)) return
|
||||
for await (const requests of fromServer.requests) {
|
||||
const encoded = Codec.encode(requests as jsrpc.JSONRPCRequest)
|
||||
postMessage(encoded)
|
||||
}
|
||||
})
|
||||
|
||||
new Promise<void>(async (resolve) => {
|
||||
if (err(fromServer)) return
|
||||
for await (const notification of fromServer.notifications) {
|
||||
const encoded = Codec.encode(notification as jsrpc.JSONRPCRequest)
|
||||
postMessage(encoded)
|
||||
}
|
||||
})
|
||||
|
@ -15,6 +15,7 @@ import {
|
||||
sketchOnExtrudedFace,
|
||||
deleteSegmentFromPipeExpression,
|
||||
removeSingleConstraintInfo,
|
||||
deleteFromSelection,
|
||||
} from './modifyAst'
|
||||
import { enginelessExecutor } from '../lib/testHelpers'
|
||||
import { findUsesOfTagInPipe, getNodePathFromSourceRange } from './queryAst'
|
||||
@ -696,3 +697,196 @@ describe('Testing removeSingleConstraintInfo', () => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Testing deleteFromSelection', () => {
|
||||
const cases = [
|
||||
[
|
||||
'basicCase',
|
||||
{
|
||||
codeBefore: `const myVar = 5
|
||||
const sketch003 = startSketchOn('XZ')
|
||||
|> startProfileAt([3.82, 13.6], %)
|
||||
|> line([-2.94, 2.7], %)
|
||||
|> line([7.7, 0.16], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)`,
|
||||
codeAfter: `const myVar = 5\n`,
|
||||
lineOfInterest: 'line([-2.94, 2.7], %)',
|
||||
type: 'default',
|
||||
},
|
||||
],
|
||||
[
|
||||
'delete extrude',
|
||||
{
|
||||
codeBefore: `const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([3.29, 7.86], %)
|
||||
|> line([2.48, 2.44], %)
|
||||
|> line([2.66, 1.17], %)
|
||||
|> line([3.75, 0.46], %)
|
||||
|> line([4.99, -0.46], %, $seg01)
|
||||
|> line([-3.86, -2.73], %)
|
||||
|> line([-17.67, 0.85], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(10, sketch001)`,
|
||||
codeAfter: `const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([3.29, 7.86], %)
|
||||
|> line([2.48, 2.44], %)
|
||||
|> line([2.66, 1.17], %)
|
||||
|> line([3.75, 0.46], %)
|
||||
|> line([4.99, -0.46], %, $seg01)
|
||||
|> line([-3.86, -2.73], %)
|
||||
|> line([-17.67, 0.85], %)
|
||||
|> close(%)\n`,
|
||||
lineOfInterest: 'line([2.66, 1.17], %)',
|
||||
type: 'extrude-wall',
|
||||
},
|
||||
],
|
||||
[
|
||||
'delete extrude with sketch on it',
|
||||
{
|
||||
codeBefore: `const myVar = 5
|
||||
const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
||||
|> line([0.08, myVar], %)
|
||||
|> line([13.03, 2.02], %, $seg01)
|
||||
|> line([3.9, -7.6], %)
|
||||
|> line([-11.18, -2.15], %)
|
||||
|> line([5.41, -9.61], %)
|
||||
|> line([-8.54, -2.51], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(5, sketch001)
|
||||
const sketch002 = startSketchOn(extrude001, seg01)
|
||||
|> startProfileAt([-12.55, 2.89], %)
|
||||
|> line([3.02, 1.9], %)
|
||||
|> line([1.82, -1.49], %, $seg02)
|
||||
|> angledLine([-86, segLen(seg02, %)], %)
|
||||
|> line([-3.97, -0.53], %)
|
||||
|> line([0.3, 0.84], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)`,
|
||||
codeAfter: `const myVar = 5
|
||||
const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
||||
|> line([0.08, myVar], %)
|
||||
|> line([13.03, 2.02], %, $seg01)
|
||||
|> line([3.9, -7.6], %)
|
||||
|> line([-11.18, -2.15], %)
|
||||
|> line([5.41, -9.61], %)
|
||||
|> line([-8.54, -2.51], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const sketch002 = startSketchOn({
|
||||
plane: {
|
||||
origin: { x: 1, y: 2, z: 3 },
|
||||
x_axis: { x: 4, y: 5, z: 6 },
|
||||
y_axis: { x: 7, y: 8, z: 9 },
|
||||
z_axis: { x: 10, y: 11, z: 12 }
|
||||
}
|
||||
})
|
||||
|> startProfileAt([-12.55, 2.89], %)
|
||||
|> line([3.02, 1.9], %)
|
||||
|> line([1.82, -1.49], %, $seg02)
|
||||
|> angledLine([-86, segLen(seg02, %)], %)
|
||||
|> line([-3.97, -0.53], %)
|
||||
|> line([0.3, 0.84], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
`,
|
||||
lineOfInterest: 'line([-11.18, -2.15], %)',
|
||||
type: 'extrude-wall',
|
||||
},
|
||||
],
|
||||
[
|
||||
'delete extrude with sketch on it',
|
||||
{
|
||||
codeBefore: `const myVar = 5
|
||||
const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
||||
|> line([0.08, myVar], %)
|
||||
|> line([13.03, 2.02], %, $seg01)
|
||||
|> line([3.9, -7.6], %)
|
||||
|> line([-11.18, -2.15], %)
|
||||
|> line([5.41, -9.61], %)
|
||||
|> line([-8.54, -2.51], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const extrude001 = extrude(5, sketch001)
|
||||
const sketch002 = startSketchOn(extrude001, seg01)
|
||||
|> startProfileAt([-12.55, 2.89], %)
|
||||
|> line([3.02, 1.9], %)
|
||||
|> line([1.82, -1.49], %, $seg02)
|
||||
|> angledLine([-86, segLen(seg02, %)], %)
|
||||
|> line([-3.97, -0.53], %)
|
||||
|> line([0.3, 0.84], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)`,
|
||||
codeAfter: `const myVar = 5
|
||||
const sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([4.46, 5.12], %, $tag)
|
||||
|> line([0.08, myVar], %)
|
||||
|> line([13.03, 2.02], %, $seg01)
|
||||
|> line([3.9, -7.6], %)
|
||||
|> line([-11.18, -2.15], %)
|
||||
|> line([5.41, -9.61], %)
|
||||
|> line([-8.54, -2.51], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
const sketch002 = startSketchOn({
|
||||
plane: {
|
||||
origin: { x: 1, y: 2, z: 3 },
|
||||
x_axis: { x: 4, y: 5, z: 6 },
|
||||
y_axis: { x: 7, y: 8, z: 9 },
|
||||
z_axis: { x: 10, y: 11, z: 12 }
|
||||
}
|
||||
})
|
||||
|> startProfileAt([-12.55, 2.89], %)
|
||||
|> line([3.02, 1.9], %)
|
||||
|> line([1.82, -1.49], %, $seg02)
|
||||
|> angledLine([-86, segLen(seg02, %)], %)
|
||||
|> line([-3.97, -0.53], %)
|
||||
|> line([0.3, 0.84], %)
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
`,
|
||||
lineOfInterest: 'startProfileAt([4.46, 5.12], %, $tag)',
|
||||
type: 'end-cap',
|
||||
},
|
||||
],
|
||||
] as const
|
||||
test.each(cases)(
|
||||
'%s',
|
||||
async (name, { codeBefore, codeAfter, lineOfInterest, type }) => {
|
||||
// const lineOfInterest = 'line([-2.94, 2.7], %)'
|
||||
const ast = parse(codeBefore)
|
||||
if (err(ast)) throw ast
|
||||
const programMemory = await enginelessExecutor(ast)
|
||||
|
||||
// deleteFromSelection
|
||||
const range: [number, number] = [
|
||||
codeBefore.indexOf(lineOfInterest),
|
||||
codeBefore.indexOf(lineOfInterest) + lineOfInterest.length,
|
||||
]
|
||||
const newAst = await deleteFromSelection(
|
||||
ast,
|
||||
{
|
||||
range,
|
||||
type,
|
||||
},
|
||||
programMemory,
|
||||
async () => {
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
return {
|
||||
origin: { x: 1, y: 2, z: 3 },
|
||||
x_axis: { x: 4, y: 5, z: 6 },
|
||||
y_axis: { x: 7, y: 8, z: 9 },
|
||||
z_axis: { x: 10, y: 11, z: 12 },
|
||||
}
|
||||
}
|
||||
)
|
||||
if (err(newAst)) throw newAst
|
||||
const newCode = recast(newAst)
|
||||
expect(newCode).toBe(codeAfter)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -17,6 +17,7 @@ import {
|
||||
PathToNode,
|
||||
ProgramMemory,
|
||||
SourceRange,
|
||||
SketchGroup,
|
||||
} from './wasm'
|
||||
import {
|
||||
isNodeSafeToReplacePath,
|
||||
@ -25,6 +26,7 @@ import {
|
||||
getNodeFromPath,
|
||||
getNodePathFromSourceRange,
|
||||
isNodeSafeToReplace,
|
||||
traverse,
|
||||
} from './queryAst'
|
||||
import { addTagForSketchOnFace, getConstraintInfo } from './std/sketch'
|
||||
import {
|
||||
@ -38,6 +40,7 @@ import { isOverlap, roundOff } from 'lib/utils'
|
||||
import { KCL_DEFAULT_CONSTANT_PREFIXES } from 'lib/constants'
|
||||
import { ConstrainInfo } from './std/stdTypes'
|
||||
import { TagDeclarator } from 'wasm-lib/kcl/bindings/TagDeclarator'
|
||||
import { Models } from '@kittycad/lib'
|
||||
|
||||
export function startSketchOnDefault(
|
||||
node: Program,
|
||||
@ -707,7 +710,7 @@ export function moveValueIntoNewVariablePath(
|
||||
programMemory,
|
||||
pathToNode
|
||||
)
|
||||
let _node = JSON.parse(JSON.stringify(ast))
|
||||
let _node = ast
|
||||
const boop = replacer(_node, variableName)
|
||||
if (trap(boop)) return { modifiedAst: ast }
|
||||
|
||||
@ -739,7 +742,7 @@ export function moveValueIntoNewVariable(
|
||||
programMemory,
|
||||
sourceRange
|
||||
)
|
||||
let _node = JSON.parse(JSON.stringify(ast))
|
||||
let _node = ast
|
||||
const replaced = replacer(_node, variableName)
|
||||
if (trap(replaced)) return { modifiedAst: ast }
|
||||
|
||||
@ -764,7 +767,7 @@ export function deleteSegmentFromPipeExpression(
|
||||
code: string,
|
||||
pathToNode: PathToNode
|
||||
): Program | Error {
|
||||
let _modifiedAst: Program = JSON.parse(JSON.stringify(modifiedAst))
|
||||
let _modifiedAst: Program = modifiedAst
|
||||
|
||||
dependentRanges.forEach((range) => {
|
||||
const path = getNodePathFromSourceRange(_modifiedAst, range)
|
||||
@ -873,3 +876,175 @@ export function removeSingleConstraintInfo(
|
||||
if (err(retval)) return false
|
||||
return retval
|
||||
}
|
||||
|
||||
export async function deleteFromSelection(
|
||||
ast: Program,
|
||||
selection: Selection,
|
||||
programMemory: ProgramMemory,
|
||||
getFaceDetails: (id: string) => Promise<Models['FaceIsPlanar_type']> = () =>
|
||||
({} as any)
|
||||
): Promise<Program | Error> {
|
||||
const astClone = ast
|
||||
const range = selection.range
|
||||
const path = getNodePathFromSourceRange(ast, range)
|
||||
const varDec = getNodeFromPath<VariableDeclarator>(
|
||||
ast,
|
||||
path,
|
||||
'VariableDeclarator'
|
||||
)
|
||||
if (err(varDec)) return varDec
|
||||
if (
|
||||
(selection.type === 'extrude-wall' ||
|
||||
selection.type === 'end-cap' ||
|
||||
selection.type === 'start-cap') &&
|
||||
varDec.node.init.type === 'PipeExpression'
|
||||
) {
|
||||
const varDecName = varDec.node.id.name
|
||||
let pathToNode: PathToNode | null = null
|
||||
let extrudeNameToDelete = ''
|
||||
traverse(astClone, {
|
||||
enter: (node, path) => {
|
||||
if (node.type === 'VariableDeclaration') {
|
||||
const dec = node.declarations[0]
|
||||
if (
|
||||
dec.init.type === 'CallExpression' &&
|
||||
(dec.init.callee.name === 'extrude' ||
|
||||
dec.init.callee.name === 'revolve') &&
|
||||
dec.init.arguments?.[1].type === 'Identifier' &&
|
||||
dec.init.arguments?.[1].name === varDecName
|
||||
) {
|
||||
pathToNode = path
|
||||
extrudeNameToDelete = dec.id.name
|
||||
}
|
||||
}
|
||||
},
|
||||
})
|
||||
if (!pathToNode) return new Error('Could not find extrude variable')
|
||||
|
||||
const expressionIndex = pathToNode[1][0] as number
|
||||
astClone.body.splice(expressionIndex, 1)
|
||||
if (extrudeNameToDelete) {
|
||||
await new Promise(async (resolve) => {
|
||||
let currentVariableName = ''
|
||||
const pathsDependingOnExtrude: Array<{
|
||||
path: PathToNode
|
||||
sketchName: string
|
||||
}> = []
|
||||
traverse(astClone, {
|
||||
leave: (node) => {
|
||||
if (node.type === 'VariableDeclaration') {
|
||||
currentVariableName = ''
|
||||
}
|
||||
},
|
||||
enter: async (node, path) => {
|
||||
if (node.type === 'VariableDeclaration') {
|
||||
currentVariableName = node.declarations[0].id.name
|
||||
}
|
||||
if (
|
||||
// match startSketchOn(${extrudeNameToDelete})
|
||||
node.type === 'CallExpression' &&
|
||||
node.callee.name === 'startSketchOn' &&
|
||||
node.arguments[0].type === 'Identifier' &&
|
||||
node.arguments[0].name === extrudeNameToDelete
|
||||
) {
|
||||
pathsDependingOnExtrude.push({
|
||||
path,
|
||||
sketchName: currentVariableName,
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
const roundLiteral = (x: number) => createLiteral(roundOff(x))
|
||||
const modificationDetails: {
|
||||
parent: PipeExpression['body']
|
||||
faceDetails: Models['FaceIsPlanar_type']
|
||||
lastKey: number
|
||||
}[] = []
|
||||
for (const { path, sketchName } of pathsDependingOnExtrude) {
|
||||
const parent = getNodeFromPath<PipeExpression['body']>(
|
||||
astClone,
|
||||
path.slice(0, -1)
|
||||
)
|
||||
if (err(parent)) {
|
||||
return
|
||||
}
|
||||
const sketchToPreserve = programMemory.root[sketchName] as SketchGroup
|
||||
console.log('sketchName', sketchName)
|
||||
// Can't kick off multiple requests at once as getFaceDetails
|
||||
// is three engine calls in one and they conflict
|
||||
const faceDetails = await getFaceDetails(sketchToPreserve.on.id)
|
||||
if (
|
||||
!(
|
||||
faceDetails.origin &&
|
||||
faceDetails.x_axis &&
|
||||
faceDetails.y_axis &&
|
||||
faceDetails.z_axis
|
||||
)
|
||||
) {
|
||||
return
|
||||
}
|
||||
const lastKey = Number(path.slice(-1)[0][0])
|
||||
modificationDetails.push({
|
||||
parent: parent.node,
|
||||
faceDetails,
|
||||
lastKey,
|
||||
})
|
||||
}
|
||||
for (const { parent, faceDetails, lastKey } of modificationDetails) {
|
||||
if (
|
||||
!(
|
||||
faceDetails.origin &&
|
||||
faceDetails.x_axis &&
|
||||
faceDetails.y_axis &&
|
||||
faceDetails.z_axis
|
||||
)
|
||||
) {
|
||||
continue
|
||||
}
|
||||
parent[lastKey] = createCallExpressionStdLib('startSketchOn', [
|
||||
createObjectExpression({
|
||||
plane: createObjectExpression({
|
||||
origin: createObjectExpression({
|
||||
x: roundLiteral(faceDetails.origin.x),
|
||||
y: roundLiteral(faceDetails.origin.y),
|
||||
z: roundLiteral(faceDetails.origin.z),
|
||||
}),
|
||||
x_axis: createObjectExpression({
|
||||
x: roundLiteral(faceDetails.x_axis.x),
|
||||
y: roundLiteral(faceDetails.x_axis.y),
|
||||
z: roundLiteral(faceDetails.x_axis.z),
|
||||
}),
|
||||
y_axis: createObjectExpression({
|
||||
x: roundLiteral(faceDetails.y_axis.x),
|
||||
y: roundLiteral(faceDetails.y_axis.y),
|
||||
z: roundLiteral(faceDetails.y_axis.z),
|
||||
}),
|
||||
z_axis: createObjectExpression({
|
||||
x: roundLiteral(faceDetails.z_axis.x),
|
||||
y: roundLiteral(faceDetails.z_axis.y),
|
||||
z: roundLiteral(faceDetails.z_axis.z),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
])
|
||||
}
|
||||
resolve(true)
|
||||
})
|
||||
}
|
||||
// await prom
|
||||
return astClone
|
||||
} else if (varDec.node.init.type === 'PipeExpression') {
|
||||
const pipeBody = varDec.node.init.body
|
||||
if (
|
||||
pipeBody[0].type === 'CallExpression' &&
|
||||
pipeBody[0].callee.name === 'startSketchOn'
|
||||
) {
|
||||
// remove varDec
|
||||
const varDecIndex = varDec.shallowPath[1][0] as number
|
||||
astClone.body.splice(varDecIndex, 1)
|
||||
return astClone
|
||||
}
|
||||
}
|
||||
|
||||
return new Error('Selection not recognised, could not delete')
|
||||
}
|
||||
|
@ -19,6 +19,7 @@ import {
|
||||
createPipeSubstitution,
|
||||
} from './modifyAst'
|
||||
import { err } from 'lib/trap'
|
||||
import { warn } from 'node:console'
|
||||
|
||||
beforeAll(async () => {
|
||||
await initPromise
|
||||
@ -86,10 +87,7 @@ const yo2 = hmm([identifierGuy + 5])`
|
||||
expect(result.isSafe).toBe(true)
|
||||
expect(result.value?.type).toBe('BinaryExpression')
|
||||
expect(code.slice(result.value.start, result.value.end)).toBe('100 + 100')
|
||||
const replaced = result.replacer(
|
||||
JSON.parse(JSON.stringify(ast)),
|
||||
'replaceName'
|
||||
)
|
||||
const replaced = result.replacer(ast, 'replaceName')
|
||||
if (err(replaced)) throw replaced
|
||||
const outCode = recast(replaced.modifiedAst)
|
||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||
@ -113,10 +111,7 @@ const yo2 = hmm([identifierGuy + 5])`
|
||||
expect(result.isSafe).toBe(true)
|
||||
expect(result.value?.type).toBe('CallExpression')
|
||||
expect(code.slice(result.value.start, result.value.end)).toBe("def('yo')")
|
||||
const replaced = result.replacer(
|
||||
JSON.parse(JSON.stringify(ast)),
|
||||
'replaceName'
|
||||
)
|
||||
const replaced = result.replacer(ast, 'replaceName')
|
||||
if (err(replaced)) throw replaced
|
||||
const outCode = recast(replaced.modifiedAst)
|
||||
expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
|
||||
@ -153,10 +148,7 @@ const yo2 = hmm([identifierGuy + 5])`
|
||||
expect(result.isSafe).toBe(true)
|
||||
expect(result.value?.type).toBe('BinaryExpression')
|
||||
expect(code.slice(result.value.start, result.value.end)).toBe('5 + 6')
|
||||
const replaced = result.replacer(
|
||||
JSON.parse(JSON.stringify(ast)),
|
||||
'replaceName'
|
||||
)
|
||||
const replaced = result.replacer(ast, 'replaceName')
|
||||
if (err(replaced)) throw replaced
|
||||
const outCode = recast(replaced.modifiedAst)
|
||||
expect(outCode).toContain(`const yo = replaceName`)
|
||||
@ -172,10 +164,7 @@ const yo2 = hmm([identifierGuy + 5])`
|
||||
expect(code.slice(result.value.start, result.value.end)).toBe(
|
||||
"jkl('yo') + 2"
|
||||
)
|
||||
const replaced = result.replacer(
|
||||
JSON.parse(JSON.stringify(ast)),
|
||||
'replaceName'
|
||||
)
|
||||
const replaced = result.replacer(ast, 'replaceName')
|
||||
if (err(replaced)) throw replaced
|
||||
const { modifiedAst } = replaced
|
||||
const outCode = recast(modifiedAst)
|
||||
@ -194,10 +183,7 @@ const yo2 = hmm([identifierGuy + 5])`
|
||||
expect(code.slice(result.value.start, result.value.end)).toBe(
|
||||
'identifierGuy + 5'
|
||||
)
|
||||
const replaced = result.replacer(
|
||||
JSON.parse(JSON.stringify(ast)),
|
||||
'replaceName'
|
||||
)
|
||||
const replaced = result.replacer(ast, 'replaceName')
|
||||
if (err(replaced)) throw replaced
|
||||
const { modifiedAst } = replaced
|
||||
const outCode = recast(modifiedAst)
|
||||
|
@ -520,8 +520,8 @@ export function isNodeSafeToReplacePath(
|
||||
const replaceNodeWithIdentifier: ReplacerFn = (_ast, varName) => {
|
||||
const identifier = createIdentifier(varName)
|
||||
const last = finPath[finPath.length - 1]
|
||||
const pathToReplaced = JSON.parse(JSON.stringify(finPath))
|
||||
pathToReplaced[1][0] = pathToReplaced[1][0] + 1
|
||||
const pathToReplaced = finPath
|
||||
pathToReplaced[1][0] = (pathToReplaced[1][0] as number) + 1
|
||||
const startPath = finPath.slice(0, -1)
|
||||
const _nodeToReplace = getNodeFromPath(_ast, startPath)
|
||||
if (err(_nodeToReplace)) return _nodeToReplace
|
||||
|
@ -1496,7 +1496,7 @@ export function transformSecondarySketchLinesTagFirst({
|
||||
}
|
||||
}
|
||||
| Error {
|
||||
// let node = JSON.parse(JSON.stringify(ast))
|
||||
// let node = ast
|
||||
const primarySelection = selectionRanges.codeBasedSelections[0].range
|
||||
|
||||
const _tag = giveSketchFnCallTag(ast, primarySelection, forceSegName)
|
||||
@ -1565,7 +1565,7 @@ export function transformAstSketchLines({
|
||||
}
|
||||
| Error {
|
||||
// deep clone since we are mutating in a loop, of which any could fail
|
||||
let node = JSON.parse(JSON.stringify(ast))
|
||||
let node = ast
|
||||
let _valueUsedInTransform // TODO should this be an array?
|
||||
const pathToNodeMap: PathToNodeMap = {}
|
||||
|
||||
|
@ -33,7 +33,7 @@ export function updatePathToNodeFromMap(
|
||||
oldPath: PathToNode,
|
||||
pathToNodeMap: { [key: number]: PathToNode }
|
||||
): PathToNode {
|
||||
const updatedPathToNode = JSON.parse(JSON.stringify(oldPath))
|
||||
const updatedPathToNode = oldPath
|
||||
let max = 0
|
||||
Object.values(pathToNodeMap).forEach((path) => {
|
||||
const index = Number(path[1][0])
|
||||
|
@ -334,6 +334,7 @@ export async function coreDump(
|
||||
openGithubIssue: boolean = false
|
||||
): Promise<CoreDumpInfo> {
|
||||
try {
|
||||
console.warn('CoreDump: Initializing core dump')
|
||||
const dump: CoreDumpInfo = await coredump(coreDumpManager)
|
||||
/* NOTE: this console output of the coredump should include the field
|
||||
`github_issue_url` which is not in the uploaded coredump file.
|
||||
|
@ -13,6 +13,14 @@ import screenshot from 'lib/screenshot'
|
||||
import React from 'react'
|
||||
import { VITE_KC_API_BASE_URL } from 'env'
|
||||
|
||||
/* eslint-disable suggest-no-throw/suggest-no-throw --
|
||||
* All the throws in CoreDumpManager are intentional and should be caught and handled properly
|
||||
* by the calling Promises with a catch block. The throws are essential to properly handling
|
||||
* when the app isn't ready enough or otherwise unable to produce a core dump. By throwing
|
||||
* instead of simply erroring, the code halts execution at the first point which it cannot
|
||||
* complete the core dump request.
|
||||
**/
|
||||
|
||||
/**
|
||||
* CoreDumpManager module
|
||||
* - for getting all the values from the JS world to pass to the Rust world for a core dump.
|
||||
@ -22,6 +30,7 @@ import { VITE_KC_API_BASE_URL } from 'env'
|
||||
// CoreDumpManager is instantiated in ModelingMachineProvider and passed to coreDump() in wasm.ts
|
||||
// The async function coreDump() handles any errors thrown in its Promise catch method and rethrows
|
||||
// them to so the toast handler in ModelingMachineProvider can show the user an error message toast
|
||||
// TODO: Throw more
|
||||
export class CoreDumpManager {
|
||||
engineCommandManager: EngineCommandManager
|
||||
htmlRef: React.RefObject<HTMLDivElement> | null
|
||||
|
@ -9,12 +9,12 @@ const wallMountL = 6 // the length of the bracket
|
||||
const sigmaAllow = 35000 // psi
|
||||
const width = 6 // inch
|
||||
const p = 300 // Force on shelf - lbs
|
||||
const L = 12 // inches
|
||||
const M = L * p / 2 // Moment experienced at fixed end of bracket
|
||||
const FOS = 2 // Factor of safety of 2 to be conservative
|
||||
const shelfLength = 12 // inches
|
||||
const moment = shelfLength * p / 2 // Moment experienced at fixed end of bracket
|
||||
const factorOfSafety = 2 // Factor of safety of 2 to be conservative
|
||||
|
||||
// Calculate the thickness off the bending stress and factor of safety
|
||||
const thickness = sqrt(6 * M * FOS / (width * sigmaAllow))
|
||||
const thickness = sqrt(6 * moment * factorOfSafety / (width * sigmaAllow))
|
||||
|
||||
// 0.25 inch fillet radius
|
||||
const filletR = 0.25
|
||||
|
@ -29,7 +29,10 @@ export function cleanErrs<T>(
|
||||
return [argsWOutErr.length !== value.length, argsWOutErr, argsWErr]
|
||||
}
|
||||
|
||||
// Used to report errors to user at a certain point in execution
|
||||
/**
|
||||
* Used to report errors to user at a certain point in execution
|
||||
* @returns boolean
|
||||
*/
|
||||
export function trap<T>(
|
||||
value: ExcludeErr<T> | Error,
|
||||
opts?: {
|
||||
@ -43,6 +46,8 @@ export function trap<T>(
|
||||
|
||||
console.error(value)
|
||||
opts?.suppress ||
|
||||
toast.error((opts?.altErr ?? value ?? new Error('Unknown')).toString())
|
||||
toast.error((opts?.altErr ?? value ?? new Error('Unknown')).toString(), {
|
||||
id: 'error',
|
||||
})
|
||||
return true
|
||||
}
|
||||
|
@ -96,9 +96,7 @@ export function useCalculateKclExpression({
|
||||
ast,
|
||||
engineCommandManager,
|
||||
useFakeExecutor: true,
|
||||
programMemoryOverride: JSON.parse(
|
||||
JSON.stringify(kclManager.programMemory)
|
||||
),
|
||||
programMemoryOverride: kclManager.programMemory,
|
||||
})
|
||||
const resultDeclaration = ast.body.find(
|
||||
(a) =>
|
||||
|
@ -26,7 +26,11 @@ import {
|
||||
applyConstraintEqualLength,
|
||||
setEqualLengthInfo,
|
||||
} from 'components/Toolbar/EqualLength'
|
||||
import { addStartProfileAt, extrudeSketch } from 'lang/modifyAst'
|
||||
import {
|
||||
addStartProfileAt,
|
||||
deleteFromSelection,
|
||||
extrudeSketch,
|
||||
} from 'lang/modifyAst'
|
||||
import { getNodeFromPath } from '../lang/queryAst'
|
||||
import {
|
||||
applyConstraintEqualAngle,
|
||||
@ -44,12 +48,14 @@ import {
|
||||
import { Models } from '@kittycad/lib/dist/types/src'
|
||||
import { ModelingCommandSchema } from 'lib/commandBarConfigs/modelingCommandConfig'
|
||||
import { err, trap } from 'lib/trap'
|
||||
import { DefaultPlaneStr } from 'clientSideScene/sceneEntities'
|
||||
import { DefaultPlaneStr, getFaceDetails } from 'clientSideScene/sceneEntities'
|
||||
import { Vector3 } from 'three'
|
||||
import { quaternionFromUpNForward } from 'clientSideScene/helpers'
|
||||
import { uuidv4 } from 'lib/utils'
|
||||
import { Coords2d } from 'lang/std/sketch'
|
||||
import { deleteSegment } from 'clientSideScene/ClientSideSceneComp'
|
||||
import { executeAst } from 'useStore'
|
||||
import toast from 'react-hot-toast'
|
||||
|
||||
export const MODELING_PERSIST_KEY = 'MODELING_PERSIST_KEY'
|
||||
|
||||
@ -157,6 +163,9 @@ export type ModelingMachineEvent =
|
||||
type: 'Set selection'
|
||||
data: SetSelections
|
||||
}
|
||||
| {
|
||||
type: 'Delete selection'
|
||||
}
|
||||
| { type: 'Sketch no face' }
|
||||
| { type: 'Toggle gui mode' }
|
||||
| { type: 'Cancel' }
|
||||
@ -273,6 +282,13 @@ export const modelingMachine = createMachine(
|
||||
cond: 'Has exportable geometry',
|
||||
actions: 'Engine export',
|
||||
},
|
||||
|
||||
'Delete selection': {
|
||||
target: 'idle',
|
||||
cond: 'has valid selection for deletion',
|
||||
actions: ['AST delete selection'],
|
||||
internal: true,
|
||||
},
|
||||
},
|
||||
|
||||
entry: 'reset client scene mouse handlers',
|
||||
@ -963,6 +979,42 @@ export const modelingMachine = createMachine(
|
||||
editorManager.selectRange(updatedAst?.selections)
|
||||
}
|
||||
},
|
||||
'AST delete selection': async ({ sketchDetails, selectionRanges }) => {
|
||||
let ast = kclManager.ast
|
||||
|
||||
const getScaledFaceDetails = async (entityId: string) => {
|
||||
const faceDetails = await getFaceDetails(entityId)
|
||||
if (err(faceDetails)) return {}
|
||||
return {
|
||||
...faceDetails,
|
||||
origin: {
|
||||
x: faceDetails.origin.x / sceneInfra._baseUnitMultiplier,
|
||||
y: faceDetails.origin.y / sceneInfra._baseUnitMultiplier,
|
||||
z: faceDetails.origin.z / sceneInfra._baseUnitMultiplier,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const modifiedAst = await deleteFromSelection(
|
||||
ast,
|
||||
selectionRanges.codeBasedSelections[0],
|
||||
kclManager.programMemory,
|
||||
getScaledFaceDetails
|
||||
)
|
||||
if (err(modifiedAst)) return
|
||||
|
||||
const testExecute = await executeAst({
|
||||
ast: modifiedAst,
|
||||
useFakeExecutor: true,
|
||||
engineCommandManager,
|
||||
})
|
||||
if (testExecute.errors.length) {
|
||||
toast.error('Unable to delete part')
|
||||
return
|
||||
}
|
||||
|
||||
await kclManager.updateAst(modifiedAst, true)
|
||||
},
|
||||
'conditionally equip line tool': (_, { type }) => {
|
||||
if (type === 'done.invoke.animate-to-face') {
|
||||
sceneInfra.modelingSend('Equip Line tool')
|
||||
|
14
src/wasm-lib/Cargo.lock
generated
14
src/wasm-lib/Cargo.lock
generated
@ -533,6 +533,7 @@ dependencies = [
|
||||
"ciborium",
|
||||
"clap",
|
||||
"criterion-plot",
|
||||
"futures",
|
||||
"is-terminal",
|
||||
"itertools 0.10.5",
|
||||
"num-traits",
|
||||
@ -545,6 +546,7 @@ dependencies = [
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"tinytemplate",
|
||||
"tokio",
|
||||
"walkdir",
|
||||
]
|
||||
|
||||
@ -710,7 +712,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "derive-docs"
|
||||
version = "0.1.18"
|
||||
version = "0.1.19"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"anyhow",
|
||||
@ -1383,7 +1385,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-lib"
|
||||
version = "0.1.67"
|
||||
version = "0.1.68"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"approx",
|
||||
@ -3275,9 +3277,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
|
||||
|
||||
[[package]]
|
||||
name = "ts-rs"
|
||||
version = "9.0.0"
|
||||
version = "9.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e2dcf58e612adda9a83800731e8e4aba04d8a302b9029617b0b6e4b021d5357"
|
||||
checksum = "b44017f9f875786e543595076374b9ef7d13465a518dd93d6ccdbf5b432dde8c"
|
||||
dependencies = [
|
||||
"chrono",
|
||||
"serde_json",
|
||||
@ -3289,9 +3291,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ts-rs-macros"
|
||||
version = "9.0.0"
|
||||
version = "9.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cbdee324e50a7402416d9c25270d3df4241ed528af5d36dda18b6f219551c577"
|
||||
checksum = "c88cc88fd23b5a04528f3a8436024f20010a16ec18eb23c164b1242f65860130"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "derive-docs"
|
||||
description = "A tool for generating documentation from Rust derive macros"
|
||||
version = "0.1.18"
|
||||
version = "0.1.19"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/KittyCAD/modeling-app"
|
||||
|
@ -761,7 +761,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
|
||||
is_mock: true,
|
||||
};
|
||||
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -795,7 +795,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
|
||||
let program = parser.ast().unwrap();
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default()).await.unwrap();
|
||||
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
|
||||
// Zoom to fit.
|
||||
ctx.engine
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_someFn {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_someFn {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_someFn {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_someFn {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
@ -106,7 +106,7 @@ mod test_examples_show {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -135,7 +135,7 @@ mod test_examples_show {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -17,7 +17,7 @@ mod test_examples_my_func {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -47,7 +47,7 @@ mod test_examples_my_func {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
@ -108,7 +108,7 @@ mod test_examples_my_func {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -137,7 +137,7 @@ mod test_examples_my_func {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -17,7 +17,7 @@ mod test_examples_line_to {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -47,7 +47,7 @@ mod test_examples_line_to {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
@ -108,7 +108,7 @@ mod test_examples_line_to {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -137,7 +137,7 @@ mod test_examples_line_to {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_min {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_min {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
@ -106,7 +106,7 @@ mod test_examples_min {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -135,7 +135,7 @@ mod test_examples_min {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_import {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_import {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_import {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_import {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_import {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_import {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -16,7 +16,7 @@ mod test_examples_show {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
|
||||
@ -45,7 +45,7 @@ mod test_examples_show {
|
||||
let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
|
||||
.await
|
||||
.unwrap();
|
||||
ctx.run(program, None).await.unwrap();
|
||||
ctx.run(&program, None).await.unwrap();
|
||||
ctx.engine
|
||||
.send_modeling_cmd(
|
||||
uuid::Uuid::new_v4(),
|
||||
|
@ -157,7 +157,7 @@ async fn snapshot_endpoint(body: Bytes, state: ExecutorContext) -> Response<Body
|
||||
// Let users know if the test is taking a long time.
|
||||
let (done_tx, done_rx) = oneshot::channel::<()>();
|
||||
let timer = time_until(done_rx);
|
||||
let snapshot = match state.execute_and_prepare_snapshot(program).await {
|
||||
let snapshot = match state.execute_and_prepare_snapshot(&program).await {
|
||||
Ok(sn) => sn,
|
||||
Err(e) => return kcl_err(e),
|
||||
};
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "kcl-lib"
|
||||
description = "KittyCAD Language implementation and tools"
|
||||
version = "0.1.67"
|
||||
version = "0.1.68"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/KittyCAD/modeling-app"
|
||||
@ -19,7 +19,7 @@ chrono = "0.4.38"
|
||||
clap = { version = "4.5.7", default-features = false, optional = true }
|
||||
dashmap = "6.0.1"
|
||||
databake = { version = "0.1.8", features = ["derive"] }
|
||||
derive-docs = { version = "0.1.18", path = "../derive-docs" }
|
||||
derive-docs = { version = "0.1.19", path = "../derive-docs" }
|
||||
form_urlencoded = "1.2.1"
|
||||
futures = { version = "0.3.30" }
|
||||
git_rev = "0.1.0"
|
||||
@ -28,7 +28,7 @@ kittycad = { workspace = true, features = ["clap"] }
|
||||
lazy_static = "1.5.0"
|
||||
mime_guess = "2.0.4"
|
||||
parse-display = "0.9.1"
|
||||
pyo3 = {version = "0.22.0", optional = true}
|
||||
pyo3 = { version = "0.22.0", optional = true }
|
||||
reqwest = { version = "0.11.26", default-features = false, features = ["stream", "rustls-tls"] }
|
||||
ropey = "1.6.1"
|
||||
schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1"] }
|
||||
@ -37,7 +37,7 @@ serde_json = "1.0.118"
|
||||
sha2 = "0.10.8"
|
||||
thiserror = "1.0.61"
|
||||
toml = "0.8.14"
|
||||
ts-rs = { version = "9.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
|
||||
ts-rs = { version = "9.0.1", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
|
||||
url = { version = "2.5.2", features = ["serde"] }
|
||||
uuid = { version = "1.9.1", features = ["v4", "js", "serde"] }
|
||||
validator = { version = "0.18.1", features = ["derive"] }
|
||||
@ -67,6 +67,8 @@ cli = ["dep:clap"]
|
||||
disable-println = []
|
||||
engine = []
|
||||
pyo3 = ["dep:pyo3"]
|
||||
# Helper functions also used in benchmarks.
|
||||
lsp-test-util = []
|
||||
|
||||
[profile.release]
|
||||
panic = "abort"
|
||||
@ -78,10 +80,10 @@ debug = true # Flamegraphs of benchmarks require accurate debug symbols
|
||||
[dev-dependencies]
|
||||
base64 = "0.22.1"
|
||||
convert_case = "0.6.0"
|
||||
criterion = "0.5.1"
|
||||
criterion = { version = "0.5.1", features = ["async_tokio"] }
|
||||
expectorate = "1.1.0"
|
||||
iai = "0.1"
|
||||
image = {version = "0.25.1", default-features = false, features = ["png"] }
|
||||
image = { version = "0.25.1", default-features = false, features = ["png"] }
|
||||
insta = { version = "1.38.0", features = ["json"] }
|
||||
itertools = "0.13.0"
|
||||
pretty_assertions = "1.4.0"
|
||||
@ -95,3 +97,13 @@ harness = false
|
||||
[[bench]]
|
||||
name = "compiler_benchmark_iai"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "lsp_semantic_tokens_benchmark_criterion"
|
||||
harness = false
|
||||
required-features = ["lsp-test-util"]
|
||||
|
||||
[[bench]]
|
||||
name = "lsp_semantic_tokens_benchmark_iai"
|
||||
harness = false
|
||||
required-features = ["lsp-test-util"]
|
||||
|
@ -0,0 +1,65 @@
|
||||
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
|
||||
use kcl_lib::lsp::test_util::kcl_lsp_server;
|
||||
use tokio::runtime::Runtime;
|
||||
use tower_lsp::LanguageServer;
|
||||
|
||||
async fn kcl_lsp_semantic_tokens(code: &str) {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
|
||||
// Send semantic tokens request.
|
||||
black_box(
|
||||
server
|
||||
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
},
|
||||
partial_result_params: Default::default(),
|
||||
work_done_progress_params: Default::default(),
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
|
||||
fn bench_kcl_lsp_semantic_tokens(c: &mut Criterion) {
|
||||
for (name, code) in [
|
||||
("pipes_on_pipes", PIPES_PROGRAM),
|
||||
("big_kitt", KITT_PROGRAM),
|
||||
("cube", CUBE_PROGRAM),
|
||||
("math", MATH_PROGRAM),
|
||||
("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
|
||||
("global_tags", GLOBAL_TAGS_FILE),
|
||||
] {
|
||||
c.bench_with_input(BenchmarkId::new("semantic_tokens_", name), &code, |b, &s| {
|
||||
let rt = Runtime::new().unwrap();
|
||||
|
||||
// Spawn a future onto the runtime
|
||||
b.iter(|| {
|
||||
rt.block_on(kcl_lsp_semantic_tokens(s));
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
criterion_group!(benches, bench_kcl_lsp_semantic_tokens);
|
||||
criterion_main!(benches);
|
||||
|
||||
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
|
||||
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
|
||||
const CUBE_PROGRAM: &str = include_str!("../../tests/executor/inputs/cube.kcl");
|
||||
const MATH_PROGRAM: &str = include_str!("../../tests/executor/inputs/math.kcl");
|
||||
const MIKE_STRESS_TEST_PROGRAM: &str = include_str!("../../tests/executor/inputs/mike_stress_test.kcl");
|
||||
const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");
|
@ -0,0 +1,45 @@
|
||||
use iai::black_box;
|
||||
use kcl_lib::lsp::test_util::kcl_lsp_server;
|
||||
use tower_lsp::LanguageServer;
|
||||
|
||||
async fn kcl_lsp_semantic_tokens(code: &str) {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: code.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
|
||||
// Send semantic tokens request.
|
||||
black_box(
|
||||
server
|
||||
.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
},
|
||||
partial_result_params: Default::default(),
|
||||
work_done_progress_params: Default::default(),
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
|
||||
async fn semantic_tokens_global_tags() {
|
||||
let code = GLOBAL_TAGS_FILE;
|
||||
kcl_lsp_semantic_tokens(code).await;
|
||||
}
|
||||
|
||||
iai::main! {
|
||||
semantic_tokens_global_tags,
|
||||
}
|
||||
|
||||
const GLOBAL_TAGS_FILE: &str = include_str!("../../tests/executor/inputs/global-tags.kcl");
|
@ -159,7 +159,7 @@ impl Program {
|
||||
RuleT: crate::lint::rule::Rule<'a>,
|
||||
{
|
||||
let v = Arc::new(Mutex::new(vec![]));
|
||||
crate::lint::walk(self, &|node: crate::lint::Node<'a>| {
|
||||
crate::walk::walk(self, &|node: crate::walk::Node<'a>| {
|
||||
let mut findings = v.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
findings.append(&mut rule.check(node)?);
|
||||
Ok(true)
|
||||
@ -171,13 +171,13 @@ impl Program {
|
||||
/// Walk the ast and get all the variables and tags as completion items.
|
||||
pub fn completion_items<'a>(&'a self) -> Result<Vec<CompletionItem>> {
|
||||
let completions = Arc::new(Mutex::new(vec![]));
|
||||
crate::lint::walk(self, &|node: crate::lint::Node<'a>| {
|
||||
crate::walk::walk(self, &|node: crate::walk::Node<'a>| {
|
||||
let mut findings = completions.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
match node {
|
||||
crate::lint::Node::TagDeclarator(tag) => {
|
||||
crate::walk::Node::TagDeclarator(tag) => {
|
||||
findings.push(tag.into());
|
||||
}
|
||||
crate::lint::Node::VariableDeclaration(variable) => {
|
||||
crate::walk::Node::VariableDeclaration(variable) => {
|
||||
findings.extend::<Vec<CompletionItem>>(variable.into());
|
||||
}
|
||||
_ => {}
|
||||
@ -255,13 +255,13 @@ impl Program {
|
||||
/// Returns all the lsp symbols in the program.
|
||||
pub fn get_lsp_symbols<'a>(&'a self, code: &str) -> Result<Vec<DocumentSymbol>> {
|
||||
let symbols = Arc::new(Mutex::new(vec![]));
|
||||
crate::lint::walk(self, &|node: crate::lint::Node<'a>| {
|
||||
crate::walk::walk(self, &|node: crate::walk::Node<'a>| {
|
||||
let mut findings = symbols.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
match node {
|
||||
crate::lint::Node::TagDeclarator(tag) => {
|
||||
crate::walk::Node::TagDeclarator(tag) => {
|
||||
findings.extend::<Vec<DocumentSymbol>>(tag.get_lsp_symbols(code));
|
||||
}
|
||||
crate::lint::Node::VariableDeclaration(variable) => {
|
||||
crate::walk::Node::VariableDeclaration(variable) => {
|
||||
findings.extend::<Vec<DocumentSymbol>>(variable.get_lsp_symbols(code));
|
||||
}
|
||||
_ => {}
|
||||
@ -1217,7 +1217,7 @@ impl CallExpression {
|
||||
|
||||
// Call the stdlib function
|
||||
let p = func.function().clone().body;
|
||||
let results = match ctx.inner_execute(p, &mut fn_memory, BodyType::Block).await {
|
||||
let results = match ctx.inner_execute(&p, &mut fn_memory, BodyType::Block).await {
|
||||
Ok(results) => results,
|
||||
Err(err) => {
|
||||
// We need to override the source ranges so we don't get the embedded kcl
|
||||
|
@ -828,7 +828,7 @@ mod tests {
|
||||
assert_eq!(
|
||||
some_function,
|
||||
crate::ast::types::Function::StdLib {
|
||||
func: Box::new(crate::std::sketch::Line),
|
||||
func: Box::new(crate::std::sketch::Line)
|
||||
}
|
||||
);
|
||||
}
|
||||
|
@ -110,6 +110,8 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
}
|
||||
|
||||
/// Send the modeling cmd and wait for the response.
|
||||
// TODO: This should only borrow `cmd`.
|
||||
// See https://github.com/KittyCAD/modeling-app/issues/2821
|
||||
async fn send_modeling_cmd(
|
||||
&self,
|
||||
id: uuid::Uuid,
|
||||
|
@ -142,7 +142,7 @@ impl IntoDiagnostic for KclError {
|
||||
|
||||
Diagnostic {
|
||||
range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
|
||||
severity: Some(DiagnosticSeverity::ERROR),
|
||||
severity: Some(self.severity()),
|
||||
code: None,
|
||||
// TODO: this is neat we can pass a URL to a help page here for this specific error.
|
||||
code_description: None,
|
||||
@ -153,6 +153,10 @@ impl IntoDiagnostic for KclError {
|
||||
data: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn severity(&self) -> DiagnosticSeverity {
|
||||
DiagnosticSeverity::ERROR
|
||||
}
|
||||
}
|
||||
|
||||
/// This is different than to_string() in that it will serialize the Error
|
||||
|
@ -16,7 +16,7 @@ use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
fs::FileManager,
|
||||
settings::types::UnitLength,
|
||||
std::{FunctionKind, StdLib},
|
||||
std::{FnAsArg, FunctionKind, StdLib},
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
||||
@ -640,6 +640,52 @@ impl MemoryItem {
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
fn as_user_val(&self) -> Option<&UserVal> {
|
||||
if let MemoryItem::UserVal(x) = self {
|
||||
Some(x)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// If this value is of type u32, return it.
|
||||
pub fn get_u32(&self, source_ranges: Vec<SourceRange>) -> Result<u32, KclError> {
|
||||
let err = KclError::Semantic(KclErrorDetails {
|
||||
message: "Expected an integer >= 0".to_owned(),
|
||||
source_ranges,
|
||||
});
|
||||
self.as_user_val()
|
||||
.and_then(|uv| uv.value.as_number())
|
||||
.and_then(|n| n.as_u64())
|
||||
.and_then(|n| u32::try_from(n).ok())
|
||||
.ok_or(err)
|
||||
}
|
||||
|
||||
/// If this value is of type function, return it.
|
||||
pub fn get_function(&self, source_ranges: Vec<SourceRange>) -> Result<FnAsArg<'_>, KclError> {
|
||||
let MemoryItem::Function {
|
||||
func,
|
||||
expression,
|
||||
meta: _,
|
||||
} = &self
|
||||
else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: "not an in-memory function".to_string(),
|
||||
source_ranges,
|
||||
}));
|
||||
};
|
||||
let func = func.as_ref().ok_or_else(|| {
|
||||
KclError::Semantic(KclErrorDetails {
|
||||
message: format!("Not an in-memory function: {:?}", expression),
|
||||
source_ranges,
|
||||
})
|
||||
})?;
|
||||
Ok(FnAsArg {
|
||||
func,
|
||||
expr: expression.to_owned(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Backwards compatibility for getting a tag from a memory item.
|
||||
pub fn get_tag_identifier(&self) -> Result<TagIdentifier, KclError> {
|
||||
match self {
|
||||
@ -1455,7 +1501,7 @@ impl ExecutorContext {
|
||||
/// Kurt uses this for partial execution.
|
||||
pub async fn run(
|
||||
&self,
|
||||
program: crate::ast::types::Program,
|
||||
program: &crate::ast::types::Program,
|
||||
memory: Option<ProgramMemory>,
|
||||
) -> Result<ProgramMemory, KclError> {
|
||||
// Before we even start executing the program, set the units.
|
||||
@ -1481,7 +1527,7 @@ impl ExecutorContext {
|
||||
#[async_recursion]
|
||||
pub(crate) async fn inner_execute(
|
||||
&self,
|
||||
program: crate::ast::types::Program,
|
||||
program: &crate::ast::types::Program,
|
||||
memory: &mut ProgramMemory,
|
||||
body_type: BodyType,
|
||||
) -> Result<ProgramMemory, KclError> {
|
||||
@ -1513,9 +1559,7 @@ impl ExecutorContext {
|
||||
}
|
||||
FunctionKind::Std(func) => {
|
||||
let mut newmem = memory.clone();
|
||||
let result = self
|
||||
.inner_execute(func.program().to_owned(), &mut newmem, BodyType::Block)
|
||||
.await?;
|
||||
let result = self.inner_execute(func.program(), &mut newmem, BodyType::Block).await?;
|
||||
memory.return_ = result.return_;
|
||||
}
|
||||
FunctionKind::UserDefined => {
|
||||
@ -1651,7 +1695,7 @@ impl ExecutorContext {
|
||||
let mut fn_memory = assign_args_to_params(&function_expression, args, memory.clone())?;
|
||||
|
||||
let result = ctx
|
||||
.inner_execute(function_expression.body.clone(), &mut fn_memory, BodyType::Block)
|
||||
.inner_execute(&function_expression.body, &mut fn_memory, BodyType::Block)
|
||||
.await?;
|
||||
|
||||
Ok((result.return_, fn_memory.get_tags()))
|
||||
@ -1701,7 +1745,7 @@ impl ExecutorContext {
|
||||
}
|
||||
|
||||
/// Execute the program, then get a PNG screenshot.
|
||||
pub async fn execute_and_prepare_snapshot(&self, program: Program) -> Result<kittycad::types::TakeSnapshot> {
|
||||
pub async fn execute_and_prepare_snapshot(&self, program: &Program) -> Result<kittycad::types::TakeSnapshot> {
|
||||
let _ = self.run(program, None).await?;
|
||||
|
||||
// Zoom to fit.
|
||||
@ -1818,7 +1862,7 @@ mod tests {
|
||||
settings: Default::default(),
|
||||
is_mock: true,
|
||||
};
|
||||
let memory = ctx.run(program, None).await?;
|
||||
let memory = ctx.run(&program, None).await?;
|
||||
|
||||
Ok(memory)
|
||||
}
|
||||
|
45
src/wasm-lib/kcl/src/function_param.rs
Normal file
45
src/wasm-lib/kcl/src/function_param.rs
Normal file
@ -0,0 +1,45 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use schemars::JsonSchema;
|
||||
|
||||
use crate::{
|
||||
ast::types::FunctionExpression,
|
||||
errors::KclError,
|
||||
executor::{ExecutorContext, MemoryFunction, MemoryItem, Metadata, ProgramMemory, ProgramReturn},
|
||||
};
|
||||
|
||||
/// A function being used as a parameter into a stdlib function.
|
||||
pub struct FunctionParam<'a> {
|
||||
pub inner: &'a MemoryFunction,
|
||||
pub memory: ProgramMemory,
|
||||
pub fn_expr: Box<FunctionExpression>,
|
||||
pub meta: Vec<Metadata>,
|
||||
pub ctx: ExecutorContext,
|
||||
}
|
||||
|
||||
impl<'a> FunctionParam<'a> {
|
||||
pub async fn call(
|
||||
&self,
|
||||
args: Vec<MemoryItem>,
|
||||
) -> Result<(Option<ProgramReturn>, HashMap<String, MemoryItem>), KclError> {
|
||||
(self.inner)(
|
||||
args,
|
||||
self.memory.clone(),
|
||||
self.fn_expr.clone(),
|
||||
self.meta.clone(),
|
||||
self.ctx.clone(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> JsonSchema for FunctionParam<'a> {
|
||||
fn schema_name() -> String {
|
||||
"FunctionParam".to_owned()
|
||||
}
|
||||
|
||||
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
// TODO: Actually generate a reasonable schema.
|
||||
gen.subschema_for::<()>()
|
||||
}
|
||||
}
|
@ -20,6 +20,7 @@ pub mod engine;
|
||||
pub mod errors;
|
||||
pub mod executor;
|
||||
pub mod fs;
|
||||
mod function_param;
|
||||
pub mod lint;
|
||||
pub mod lsp;
|
||||
pub mod parser;
|
||||
@ -28,5 +29,6 @@ pub mod std;
|
||||
pub mod test_server;
|
||||
pub mod thread;
|
||||
pub mod token;
|
||||
pub mod walk;
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
pub mod wasm;
|
||||
|
@ -1,236 +0,0 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{
|
||||
ast::types::{
|
||||
BinaryPart, BodyItem, LiteralIdentifier, MemberExpression, MemberObject, ObjectExpression, ObjectProperty,
|
||||
Parameter, Program, UnaryExpression, Value, VariableDeclarator,
|
||||
},
|
||||
lint::Node,
|
||||
};
|
||||
|
||||
/// Walker is implemented by things that are able to walk an AST tree to
|
||||
/// produce lints. This trait is implemented automatically for a few of the
|
||||
/// common types, but can be manually implemented too.
|
||||
pub trait Walker<'a> {
|
||||
/// Walk will visit every element of the AST.
|
||||
fn walk(&self, n: Node<'a>) -> Result<bool>;
|
||||
}
|
||||
|
||||
impl<'a, FnT> Walker<'a> for FnT
|
||||
where
|
||||
FnT: Fn(Node<'a>) -> Result<bool>,
|
||||
{
|
||||
fn walk(&self, n: Node<'a>) -> Result<bool> {
|
||||
self(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Run the Walker against all [Node]s in a [Program].
|
||||
pub fn walk<'a, WalkT>(prog: &'a Program, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(prog.into())?;
|
||||
|
||||
for bi in &prog.body {
|
||||
walk_body_item(bi, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_variable_declarator<'a, WalkT>(node: &'a VariableDeclarator, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
f.walk((&node.id).into())?;
|
||||
walk_value(&node.init, f)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_parameter<'a, WalkT>(node: &'a Parameter, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
f.walk((&node.identifier).into())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_member_object<'a, WalkT>(node: &'a MemberObject, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_literal_identifier<'a, WalkT>(node: &'a LiteralIdentifier, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_member_expression<'a, WalkT>(node: &'a MemberExpression, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
|
||||
walk_member_object(&node.object, f)?;
|
||||
walk_literal_identifier(&node.property, f)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_binary_part<'a, WalkT>(node: &'a BinaryPart, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
match node {
|
||||
BinaryPart::Literal(lit) => f.walk(lit.as_ref().into())?,
|
||||
BinaryPart::Identifier(id) => f.walk(id.as_ref().into())?,
|
||||
BinaryPart::BinaryExpression(be) => f.walk(be.as_ref().into())?,
|
||||
BinaryPart::CallExpression(ce) => f.walk(ce.as_ref().into())?,
|
||||
BinaryPart::UnaryExpression(ue) => {
|
||||
walk_unary_expression(ue, f)?;
|
||||
true
|
||||
}
|
||||
BinaryPart::MemberExpression(me) => {
|
||||
walk_member_expression(me, f)?;
|
||||
true
|
||||
}
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn walk_value<'a, WalkT>(node: &'a Value, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
match node {
|
||||
Value::Literal(lit) => {
|
||||
f.walk(lit.as_ref().into())?;
|
||||
}
|
||||
Value::TagDeclarator(tag) => {
|
||||
f.walk(tag.as_ref().into())?;
|
||||
}
|
||||
|
||||
Value::Identifier(id) => {
|
||||
// sometimes there's a bare Identifier without a Value::Identifier.
|
||||
f.walk(id.as_ref().into())?;
|
||||
}
|
||||
|
||||
Value::BinaryExpression(be) => {
|
||||
f.walk(be.as_ref().into())?;
|
||||
|
||||
walk_binary_part(&be.left, f)?;
|
||||
walk_binary_part(&be.right, f)?;
|
||||
}
|
||||
Value::FunctionExpression(fe) => {
|
||||
f.walk(fe.as_ref().into())?;
|
||||
|
||||
for arg in &fe.params {
|
||||
walk_parameter(arg, f)?;
|
||||
}
|
||||
walk(&fe.body, f)?;
|
||||
}
|
||||
Value::CallExpression(ce) => {
|
||||
f.walk(ce.as_ref().into())?;
|
||||
f.walk((&ce.callee).into())?;
|
||||
for e in &ce.arguments {
|
||||
walk_value::<WalkT>(e, f)?;
|
||||
}
|
||||
}
|
||||
Value::PipeExpression(pe) => {
|
||||
f.walk(pe.as_ref().into())?;
|
||||
|
||||
for e in &pe.body {
|
||||
walk_value::<WalkT>(e, f)?;
|
||||
}
|
||||
}
|
||||
Value::PipeSubstitution(ps) => {
|
||||
f.walk(ps.as_ref().into())?;
|
||||
}
|
||||
Value::ArrayExpression(ae) => {
|
||||
f.walk(ae.as_ref().into())?;
|
||||
for e in &ae.elements {
|
||||
walk_value::<WalkT>(e, f)?;
|
||||
}
|
||||
}
|
||||
Value::ObjectExpression(oe) => {
|
||||
walk_object_expression(oe, f)?;
|
||||
}
|
||||
Value::MemberExpression(me) => {
|
||||
walk_member_expression(me, f)?;
|
||||
}
|
||||
Value::UnaryExpression(ue) => {
|
||||
walk_unary_expression(ue, f)?;
|
||||
}
|
||||
Value::None(_) => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Walk through an [ObjectProperty].
|
||||
fn walk_object_property<'a, WalkT>(node: &'a ObjectProperty, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
walk_value(&node.value, f)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Walk through an [ObjectExpression].
|
||||
fn walk_object_expression<'a, WalkT>(node: &'a ObjectExpression, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
for prop in &node.properties {
|
||||
walk_object_property(prop, f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// walk through an [UnaryExpression].
|
||||
fn walk_unary_expression<'a, WalkT>(node: &'a UnaryExpression, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())?;
|
||||
walk_binary_part(&node.argument, f)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// walk through a [BodyItem].
|
||||
fn walk_body_item<'a, WalkT>(node: &'a BodyItem, f: &WalkT) -> Result<()>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
// We don't walk a BodyItem since it's an enum itself.
|
||||
|
||||
match node {
|
||||
BodyItem::ExpressionStatement(xs) => {
|
||||
f.walk(xs.into())?;
|
||||
walk_value(&xs.expression, f)?;
|
||||
}
|
||||
BodyItem::VariableDeclaration(vd) => {
|
||||
f.walk(vd.into())?;
|
||||
for dec in &vd.declarations {
|
||||
walk_variable_declarator(dec, f)?;
|
||||
}
|
||||
}
|
||||
BodyItem::ReturnStatement(rs) => {
|
||||
f.walk(rs.into())?;
|
||||
walk_value(&rs.argument, f)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
@ -3,10 +3,8 @@ use anyhow::Result;
|
||||
use crate::{
|
||||
ast::types::VariableDeclarator,
|
||||
executor::SourceRange,
|
||||
lint::{
|
||||
rule::{def_finding, Discovered, Finding},
|
||||
Node,
|
||||
},
|
||||
lint::rule::{def_finding, Discovered, Finding},
|
||||
walk::Node,
|
||||
};
|
||||
|
||||
def_finding!(
|
||||
@ -67,7 +65,11 @@ mod tests {
|
||||
assert_finding!(lint_variables, Z0001, "const thicc_nes = 0.5");
|
||||
}
|
||||
|
||||
test_finding!(z0001_full_bad, lint_variables, Z0001, "\
|
||||
test_finding!(
|
||||
z0001_full_bad,
|
||||
lint_variables,
|
||||
Z0001,
|
||||
"\
|
||||
// Define constants
|
||||
const pipeLength = 40
|
||||
const pipeSmallDia = 10
|
||||
@ -96,9 +98,14 @@ const Part001 = startSketchOn('XY')
|
||||
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
||||
|> close(%)
|
||||
|> revolve({ axis: 'y' }, %)
|
||||
");
|
||||
"
|
||||
);
|
||||
|
||||
test_no_finding!(z0001_full_good, lint_variables, Z0001, "\
|
||||
test_no_finding!(
|
||||
z0001_full_good,
|
||||
lint_variables,
|
||||
Z0001,
|
||||
"\
|
||||
// Define constants
|
||||
const pipeLength = 40
|
||||
const pipeSmallDia = 10
|
||||
@ -127,5 +134,6 @@ const part001 = startSketchOn('XY')
|
||||
|> angledLineToX({ angle: 60, to: pipeLargeDia }, %)
|
||||
|> close(%)
|
||||
|> revolve({ axis: 'y' }, %)
|
||||
");
|
||||
"
|
||||
);
|
||||
}
|
||||
|
@ -1,9 +1,4 @@
|
||||
mod ast_node;
|
||||
mod ast_walk;
|
||||
pub mod checks;
|
||||
pub mod rule;
|
||||
|
||||
pub use ast_node::Node;
|
||||
pub use ast_walk::walk;
|
||||
// pub(crate) use rule::{def_finding, finding};
|
||||
pub use rule::{Discovered, Finding};
|
||||
|
@ -3,7 +3,7 @@ use schemars::JsonSchema;
|
||||
use serde::Serialize;
|
||||
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
|
||||
|
||||
use crate::{executor::SourceRange, lint::Node, lsp::IntoDiagnostic};
|
||||
use crate::{executor::SourceRange, lsp::IntoDiagnostic, walk::Node};
|
||||
|
||||
/// Check the provided AST for any found rule violations.
|
||||
///
|
||||
@ -70,6 +70,10 @@ impl IntoDiagnostic for Discovered {
|
||||
fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
|
||||
(&self).to_lsp_diagnostic(code)
|
||||
}
|
||||
|
||||
fn severity(&self) -> DiagnosticSeverity {
|
||||
(&self).severity()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoDiagnostic for &Discovered {
|
||||
@ -79,7 +83,7 @@ impl IntoDiagnostic for &Discovered {
|
||||
|
||||
Diagnostic {
|
||||
range: source_range.to_lsp_range(code),
|
||||
severity: Some(DiagnosticSeverity::INFORMATION),
|
||||
severity: Some(self.severity()),
|
||||
code: None,
|
||||
// TODO: this is neat we can pass a URL to a help page here for this specific error.
|
||||
code_description: None,
|
||||
@ -90,6 +94,10 @@ impl IntoDiagnostic for &Discovered {
|
||||
data: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn severity(&self) -> DiagnosticSeverity {
|
||||
DiagnosticSeverity::INFORMATION
|
||||
}
|
||||
}
|
||||
|
||||
/// Abstract lint problem type.
|
||||
|
@ -3,59 +3,15 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use tokio::sync::RwLock;
|
||||
use dashmap::DashMap;
|
||||
use tower_lsp::lsp_types::{
|
||||
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
|
||||
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializedParams, MessageType,
|
||||
RenameFilesParams, TextDocumentItem, WorkspaceFolder,
|
||||
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializedParams, MessageType, RenameFilesParams,
|
||||
TextDocumentItem, WorkspaceFolder,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
fs::FileSystem,
|
||||
lsp::safemap::SafeMap,
|
||||
thread::{JoinHandle, Thread},
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct InnerHandle(Arc<JoinHandle>);
|
||||
|
||||
impl InnerHandle {
|
||||
pub fn new(handle: JoinHandle) -> Self {
|
||||
Self(Arc::new(handle))
|
||||
}
|
||||
|
||||
pub fn is_finished(&self) -> bool {
|
||||
self.0.is_finished()
|
||||
}
|
||||
|
||||
pub fn cancel(&self) {
|
||||
self.0.abort();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct UpdateHandle(Arc<RwLock<Option<InnerHandle>>>);
|
||||
|
||||
impl UpdateHandle {
|
||||
pub fn new(handle: InnerHandle) -> Self {
|
||||
Self(Arc::new(RwLock::new(Some(handle))))
|
||||
}
|
||||
|
||||
pub async fn read(&self) -> Option<InnerHandle> {
|
||||
self.0.read().await.clone()
|
||||
}
|
||||
|
||||
pub async fn write(&self, handle: Option<InnerHandle>) {
|
||||
*self.0.write().await = handle;
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for UpdateHandle {
|
||||
fn default() -> Self {
|
||||
Self(Arc::new(RwLock::new(None)))
|
||||
}
|
||||
}
|
||||
use crate::fs::FileSystem;
|
||||
|
||||
/// A trait for the backend of the language server.
|
||||
#[async_trait::async_trait]
|
||||
@ -63,18 +19,14 @@ pub trait Backend: Clone + Send + Sync
|
||||
where
|
||||
Self: 'static,
|
||||
{
|
||||
fn client(&self) -> tower_lsp::Client;
|
||||
fn client(&self) -> &tower_lsp::Client;
|
||||
|
||||
fn fs(&self) -> Arc<crate::fs::FileManager>;
|
||||
fn fs(&self) -> &Arc<crate::fs::FileManager>;
|
||||
|
||||
async fn is_initialized(&self) -> bool;
|
||||
|
||||
async fn set_is_initialized(&self, is_initialized: bool);
|
||||
|
||||
async fn current_handle(&self) -> Option<InnerHandle>;
|
||||
|
||||
async fn set_current_handle(&self, handle: Option<InnerHandle>);
|
||||
|
||||
async fn workspace_folders(&self) -> Vec<WorkspaceFolder>;
|
||||
|
||||
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
|
||||
@ -82,7 +34,7 @@ where
|
||||
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
|
||||
|
||||
/// Get the current code map.
|
||||
fn code_map(&self) -> SafeMap<String, Vec<u8>>;
|
||||
fn code_map(&self) -> &DashMap<String, Vec<u8>>;
|
||||
|
||||
/// Insert a new code map.
|
||||
async fn insert_code_map(&self, uri: String, text: Vec<u8>);
|
||||
@ -94,62 +46,36 @@ where
|
||||
async fn clear_code_state(&self);
|
||||
|
||||
/// Get the current diagnostics map.
|
||||
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport>;
|
||||
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>>;
|
||||
|
||||
/// On change event.
|
||||
async fn inner_on_change(&self, params: TextDocumentItem, force: bool);
|
||||
|
||||
/// Check if the file has diagnostics.
|
||||
async fn has_diagnostics(&self, uri: &str) -> bool {
|
||||
if let Some(tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics)) =
|
||||
self.current_diagnostics_map().get(uri).await
|
||||
{
|
||||
!diagnostics.full_document_diagnostic_report.items.is_empty()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
let Some(diagnostics) = self.current_diagnostics_map().get(uri) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
!diagnostics.is_empty()
|
||||
}
|
||||
|
||||
async fn on_change(&self, params: TextDocumentItem) {
|
||||
// Check if the document is in the current code map and if it is the same as what we have
|
||||
// stored.
|
||||
let filename = params.uri.to_string();
|
||||
if let Some(current_code) = self.code_map().get(&filename).await {
|
||||
if current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
|
||||
if let Some(current_code) = self.code_map().get(&filename) {
|
||||
if *current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we already have a handle running.
|
||||
if let Some(current_handle) = self.current_handle().await {
|
||||
self.set_current_handle(None).await;
|
||||
// Drop that handle to cancel it.
|
||||
current_handle.cancel();
|
||||
}
|
||||
println!("on_change after check: {:?}", params);
|
||||
|
||||
let cloned = self.clone();
|
||||
let task = JoinHandle::new(async move {
|
||||
cloned
|
||||
.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
|
||||
.await;
|
||||
cloned.inner_on_change(params, false).await;
|
||||
cloned.set_current_handle(None).await;
|
||||
});
|
||||
let update_handle = InnerHandle::new(task);
|
||||
|
||||
// Set our new handle.
|
||||
self.set_current_handle(Some(update_handle.clone())).await;
|
||||
}
|
||||
|
||||
async fn wait_on_handle(&self) {
|
||||
while let Some(handle) = self.current_handle().await {
|
||||
if !handle.is_finished() {
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
self.set_current_handle(None).await;
|
||||
self.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
|
||||
.await;
|
||||
println!("on_change after insert: {:?}", params);
|
||||
self.inner_on_change(params, false).await;
|
||||
}
|
||||
|
||||
async fn update_from_disk<P: AsRef<std::path::Path> + std::marker::Send>(&self, path: P) -> Result<()> {
|
||||
@ -211,7 +137,7 @@ where
|
||||
self.remove_workspace_folders(params.event.removed).await;
|
||||
// Remove the code from the current code map.
|
||||
// We do this since it means the user is changing projects so let's refresh the state.
|
||||
if !self.code_map().is_empty().await && should_clear {
|
||||
if !self.code_map().is_empty() && should_clear {
|
||||
self.clear_code_state().await;
|
||||
}
|
||||
for added in params.event.added {
|
||||
|
@ -9,28 +9,27 @@ use std::{
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use dashmap::DashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower_lsp::{
|
||||
jsonrpc::{Error, Result},
|
||||
lsp_types::{
|
||||
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
|
||||
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializeParams,
|
||||
InitializeResult, InitializedParams, MessageType, OneOf, RenameFilesParams, ServerCapabilities,
|
||||
TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder,
|
||||
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
|
||||
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializeResult, InitializedParams,
|
||||
MessageType, OneOf, RenameFilesParams, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability,
|
||||
TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
|
||||
WorkspaceServerCapabilities,
|
||||
},
|
||||
LanguageServer,
|
||||
};
|
||||
|
||||
use super::backend::{InnerHandle, UpdateHandle};
|
||||
use crate::lsp::{
|
||||
backend::Backend as _,
|
||||
copilot::types::{
|
||||
CopilotAcceptCompletionParams, CopilotCompletionResponse, CopilotCompletionTelemetry, CopilotEditorInfo,
|
||||
CopilotLspCompletionParams, CopilotRejectCompletionParams, DocParams,
|
||||
},
|
||||
safemap::SafeMap,
|
||||
};
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug)]
|
||||
@ -50,9 +49,9 @@ pub struct Backend {
|
||||
/// The file system client to use.
|
||||
pub fs: Arc<crate::fs::FileManager>,
|
||||
/// The workspace folders.
|
||||
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
|
||||
pub workspace_folders: DashMap<String, WorkspaceFolder>,
|
||||
/// Current code.
|
||||
pub code_map: SafeMap<String, Vec<u8>>,
|
||||
pub code_map: DashMap<String, Vec<u8>>,
|
||||
/// The Zoo API client.
|
||||
pub zoo_client: kittycad::Client,
|
||||
/// The editor info is used to store information about the editor.
|
||||
@ -60,21 +59,22 @@ pub struct Backend {
|
||||
/// The cache is used to store the results of previous requests.
|
||||
pub cache: Arc<cache::CopilotCache>,
|
||||
/// Storage so we can send telemetry data back out.
|
||||
pub telemetry: SafeMap<uuid::Uuid, CopilotCompletionTelemetry>,
|
||||
pub telemetry: DashMap<uuid::Uuid, CopilotCompletionTelemetry>,
|
||||
/// Diagnostics.
|
||||
pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
|
||||
|
||||
pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
|
||||
pub current_handle: UpdateHandle,
|
||||
}
|
||||
|
||||
// Implement the shared backend trait for the language server.
|
||||
#[async_trait::async_trait]
|
||||
impl crate::lsp::backend::Backend for Backend {
|
||||
fn client(&self) -> tower_lsp::Client {
|
||||
self.client.clone()
|
||||
fn client(&self) -> &tower_lsp::Client {
|
||||
&self.client
|
||||
}
|
||||
|
||||
fn fs(&self) -> Arc<crate::fs::FileManager> {
|
||||
self.fs.clone()
|
||||
fn fs(&self) -> &Arc<crate::fs::FileManager> {
|
||||
&self.fs
|
||||
}
|
||||
|
||||
async fn is_initialized(&self) -> bool {
|
||||
@ -85,48 +85,41 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
*self.is_initialized.write().await = is_initialized;
|
||||
}
|
||||
|
||||
async fn current_handle(&self) -> Option<InnerHandle> {
|
||||
self.current_handle.read().await
|
||||
}
|
||||
|
||||
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
|
||||
self.current_handle.write(handle).await;
|
||||
}
|
||||
|
||||
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
|
||||
self.workspace_folders.inner().await.values().cloned().collect()
|
||||
// TODO: fix clone
|
||||
self.workspace_folders.iter().map(|i| i.clone()).collect()
|
||||
}
|
||||
|
||||
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||
for folder in folders {
|
||||
self.workspace_folders.insert(folder.name.to_string(), folder).await;
|
||||
self.workspace_folders.insert(folder.name.to_string(), folder);
|
||||
}
|
||||
}
|
||||
|
||||
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||
for folder in folders {
|
||||
self.workspace_folders.remove(&folder.name).await;
|
||||
self.workspace_folders.remove(&folder.name);
|
||||
}
|
||||
}
|
||||
|
||||
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
|
||||
self.code_map.clone()
|
||||
fn code_map(&self) -> &DashMap<String, Vec<u8>> {
|
||||
&self.code_map
|
||||
}
|
||||
|
||||
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
|
||||
self.code_map.insert(uri, text).await;
|
||||
self.code_map.insert(uri, text);
|
||||
}
|
||||
|
||||
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
|
||||
self.code_map.remove(&uri).await
|
||||
self.code_map.remove(&uri).map(|(_, v)| v)
|
||||
}
|
||||
|
||||
async fn clear_code_state(&self) {
|
||||
self.code_map.clear().await;
|
||||
self.code_map.clear();
|
||||
}
|
||||
|
||||
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
|
||||
Default::default()
|
||||
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
|
||||
&self.diagnostics_map
|
||||
}
|
||||
|
||||
async fn inner_on_change(&self, _params: TextDocumentItem, _force: bool) {
|
||||
@ -138,8 +131,15 @@ impl Backend {
|
||||
/// Get completions from the kittycad api.
|
||||
pub async fn get_completions(&self, language: String, prompt: String, suffix: String) -> Result<Vec<String>> {
|
||||
let body = kittycad::types::KclCodeCompletionRequest {
|
||||
prompt: Some(prompt.clone()),
|
||||
suffix: Some(suffix.clone()),
|
||||
extra: Some(kittycad::types::KclCodeCompletionParams {
|
||||
language: Some(language.to_string()),
|
||||
next_indent: None,
|
||||
trim_by_indentation: true,
|
||||
prompt_tokens: Some(prompt.len() as u32),
|
||||
suffix_tokens: Some(suffix.len() as u32),
|
||||
}),
|
||||
prompt: Some(prompt),
|
||||
suffix: Some(suffix),
|
||||
max_tokens: Some(500),
|
||||
temperature: Some(1.0),
|
||||
top_p: Some(1.0),
|
||||
@ -149,13 +149,6 @@ impl Backend {
|
||||
nwo: None,
|
||||
// We haven't implemented streaming yet.
|
||||
stream: false,
|
||||
extra: Some(kittycad::types::KclCodeCompletionParams {
|
||||
language: Some(language.to_string()),
|
||||
next_indent: None,
|
||||
trim_by_indentation: true,
|
||||
prompt_tokens: Some(prompt.len() as u32),
|
||||
suffix_tokens: Some(suffix.len() as u32),
|
||||
}),
|
||||
};
|
||||
|
||||
let resp = self
|
||||
@ -234,7 +227,7 @@ impl Backend {
|
||||
completion: completion.clone(),
|
||||
params: params.clone(),
|
||||
};
|
||||
self.telemetry.insert(completion.uuid, telemetry).await;
|
||||
self.telemetry.insert(completion.uuid, telemetry);
|
||||
}
|
||||
self.cache
|
||||
.set_cached_result(&doc_params.uri, &doc_params.pos.line, &response);
|
||||
@ -248,7 +241,7 @@ impl Backend {
|
||||
.await;
|
||||
|
||||
// Get the original telemetry data.
|
||||
let Some(original) = self.telemetry.remove(¶ms.uuid).await else {
|
||||
let Some(original) = self.telemetry.remove(¶ms.uuid) else {
|
||||
return;
|
||||
};
|
||||
|
||||
@ -267,7 +260,7 @@ impl Backend {
|
||||
// Get the original telemetry data.
|
||||
let mut originals: Vec<CopilotCompletionTelemetry> = Default::default();
|
||||
for uuid in params.uuids {
|
||||
if let Some(original) = self.telemetry.remove(&uuid).await {
|
||||
if let Some(original) = self.telemetry.remove(&uuid).map(|(_, v)| v) {
|
||||
originals.push(original);
|
||||
}
|
||||
}
|
||||
@ -340,7 +333,7 @@ impl LanguageServer for Backend {
|
||||
}
|
||||
|
||||
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
||||
self.do_did_change(params.clone()).await;
|
||||
self.do_did_change(params).await;
|
||||
}
|
||||
|
||||
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
||||
|
@ -14,12 +14,13 @@ pub mod custom_notifications;
|
||||
use anyhow::Result;
|
||||
#[cfg(feature = "cli")]
|
||||
use clap::Parser;
|
||||
use dashmap::DashMap;
|
||||
use sha2::Digest;
|
||||
use tower_lsp::{
|
||||
jsonrpc::Result as RpcResult,
|
||||
lsp_types::{
|
||||
CompletionItem, CompletionItemKind, CompletionOptions, CompletionParams, CompletionResponse, CreateFilesParams,
|
||||
DeleteFilesParams, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
|
||||
DeleteFilesParams, Diagnostic, DiagnosticOptions, DiagnosticServerCapabilities, DiagnosticSeverity,
|
||||
DidChangeConfigurationParams, DidChangeTextDocumentParams, DidChangeWatchedFilesParams,
|
||||
DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams,
|
||||
DidSaveTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
|
||||
@ -43,11 +44,7 @@ use crate::lint::checks;
|
||||
use crate::{
|
||||
ast::types::{Value, VariableKind},
|
||||
executor::SourceRange,
|
||||
lsp::{
|
||||
backend::{Backend as _, InnerHandle, UpdateHandle},
|
||||
safemap::SafeMap,
|
||||
util::IntoDiagnostic,
|
||||
},
|
||||
lsp::{backend::Backend as _, util::IntoDiagnostic},
|
||||
parser::PIPE_OPERATOR,
|
||||
token::TokenType,
|
||||
};
|
||||
@ -68,6 +65,9 @@ lazy_static::lazy_static! {
|
||||
vec![
|
||||
SemanticTokenModifier::DECLARATION,
|
||||
SemanticTokenModifier::DEFINITION,
|
||||
SemanticTokenModifier::DEFAULT_LIBRARY,
|
||||
SemanticTokenModifier::READONLY,
|
||||
SemanticTokenModifier::STATIC,
|
||||
]
|
||||
};
|
||||
}
|
||||
@ -93,25 +93,25 @@ pub struct Backend {
|
||||
/// The file system client to use.
|
||||
pub fs: Arc<crate::fs::FileManager>,
|
||||
/// The workspace folders.
|
||||
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
|
||||
pub workspace_folders: DashMap<String, WorkspaceFolder>,
|
||||
/// The stdlib completions for the language.
|
||||
pub stdlib_completions: HashMap<String, CompletionItem>,
|
||||
/// The stdlib signatures for the language.
|
||||
pub stdlib_signatures: HashMap<String, SignatureHelp>,
|
||||
/// Token maps.
|
||||
pub token_map: SafeMap<String, Vec<crate::token::Token>>,
|
||||
pub token_map: DashMap<String, Vec<crate::token::Token>>,
|
||||
/// AST maps.
|
||||
pub ast_map: SafeMap<String, crate::ast::types::Program>,
|
||||
pub ast_map: DashMap<String, crate::ast::types::Program>,
|
||||
/// Memory maps.
|
||||
pub memory_map: SafeMap<String, crate::executor::ProgramMemory>,
|
||||
pub memory_map: DashMap<String, crate::executor::ProgramMemory>,
|
||||
/// Current code.
|
||||
pub code_map: SafeMap<String, Vec<u8>>,
|
||||
pub code_map: DashMap<String, Vec<u8>>,
|
||||
/// Diagnostics.
|
||||
pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,
|
||||
pub diagnostics_map: DashMap<String, Vec<Diagnostic>>,
|
||||
/// Symbols map.
|
||||
pub symbols_map: SafeMap<String, Vec<DocumentSymbol>>,
|
||||
pub symbols_map: DashMap<String, Vec<DocumentSymbol>>,
|
||||
/// Semantic tokens map.
|
||||
pub semantic_tokens_map: SafeMap<String, Vec<SemanticToken>>,
|
||||
pub semantic_tokens_map: DashMap<String, Vec<SemanticToken>>,
|
||||
/// The Zoo API client.
|
||||
pub zoo_client: kittycad::Client,
|
||||
/// If we can send telemetry for this user.
|
||||
@ -122,18 +122,17 @@ pub struct Backend {
|
||||
pub can_execute: Arc<RwLock<bool>>,
|
||||
|
||||
pub is_initialized: Arc<RwLock<bool>>,
|
||||
pub current_handle: UpdateHandle,
|
||||
}
|
||||
|
||||
// Implement the shared backend trait for the language server.
|
||||
#[async_trait::async_trait]
|
||||
impl crate::lsp::backend::Backend for Backend {
|
||||
fn client(&self) -> Client {
|
||||
self.client.clone()
|
||||
fn client(&self) -> &Client {
|
||||
&self.client
|
||||
}
|
||||
|
||||
fn fs(&self) -> Arc<crate::fs::FileManager> {
|
||||
self.fs.clone()
|
||||
fn fs(&self) -> &Arc<crate::fs::FileManager> {
|
||||
&self.fs
|
||||
}
|
||||
|
||||
async fn is_initialized(&self) -> bool {
|
||||
@ -144,84 +143,76 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
*self.is_initialized.write().await = is_initialized;
|
||||
}
|
||||
|
||||
async fn current_handle(&self) -> Option<InnerHandle> {
|
||||
self.current_handle.read().await
|
||||
}
|
||||
|
||||
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
|
||||
self.current_handle.write(handle).await;
|
||||
}
|
||||
|
||||
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
|
||||
self.workspace_folders.inner().await.values().cloned().collect()
|
||||
// TODO: fix clone
|
||||
self.workspace_folders.iter().map(|i| i.clone()).collect()
|
||||
}
|
||||
|
||||
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||
for folder in folders {
|
||||
self.workspace_folders.insert(folder.name.to_string(), folder).await;
|
||||
self.workspace_folders.insert(folder.name.to_string(), folder);
|
||||
}
|
||||
}
|
||||
|
||||
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
|
||||
for folder in folders {
|
||||
self.workspace_folders.remove(&folder.name).await;
|
||||
self.workspace_folders.remove(&folder.name);
|
||||
}
|
||||
}
|
||||
|
||||
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
|
||||
self.code_map.clone()
|
||||
fn code_map(&self) -> &DashMap<String, Vec<u8>> {
|
||||
&self.code_map
|
||||
}
|
||||
|
||||
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
|
||||
self.code_map.insert(uri, text).await;
|
||||
self.code_map.insert(uri, text);
|
||||
}
|
||||
|
||||
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
|
||||
self.code_map.remove(&uri).await
|
||||
self.code_map.remove(&uri).map(|x| x.1)
|
||||
}
|
||||
|
||||
async fn clear_code_state(&self) {
|
||||
self.code_map.clear().await;
|
||||
self.token_map.clear().await;
|
||||
self.ast_map.clear().await;
|
||||
self.diagnostics_map.clear().await;
|
||||
self.symbols_map.clear().await;
|
||||
self.semantic_tokens_map.clear().await;
|
||||
self.code_map.clear();
|
||||
self.token_map.clear();
|
||||
self.ast_map.clear();
|
||||
self.diagnostics_map.clear();
|
||||
self.symbols_map.clear();
|
||||
self.semantic_tokens_map.clear();
|
||||
}
|
||||
|
||||
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
|
||||
self.diagnostics_map.clone()
|
||||
fn current_diagnostics_map(&self) -> &DashMap<String, Vec<Diagnostic>> {
|
||||
&self.diagnostics_map
|
||||
}
|
||||
|
||||
async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
|
||||
let filename = params.uri.to_string();
|
||||
// We already updated the code map in the shared backend.
|
||||
|
||||
// Lets update the tokens.
|
||||
let tokens = match crate::token::lexer(¶ms.text) {
|
||||
Ok(tokens) => tokens,
|
||||
Err(err) => {
|
||||
self.add_to_diagnostics(¶ms, err, true).await;
|
||||
self.token_map.remove(¶ms.uri.to_string()).await;
|
||||
self.ast_map.remove(¶ms.uri.to_string()).await;
|
||||
self.symbols_map.remove(¶ms.uri.to_string()).await;
|
||||
self.semantic_tokens_map.remove(¶ms.uri.to_string()).await;
|
||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||
self.add_to_diagnostics(¶ms, &[err], true).await;
|
||||
self.token_map.remove(&filename);
|
||||
self.ast_map.remove(&filename);
|
||||
self.symbols_map.remove(&filename);
|
||||
self.semantic_tokens_map.remove(&filename);
|
||||
self.memory_map.remove(&filename);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
// Get the previous tokens.
|
||||
let previous_tokens = self.token_map.get(¶ms.uri.to_string()).await;
|
||||
|
||||
// Try to get the memory for the current code.
|
||||
let has_memory = if let Some(memory) = self.memory_map.get(¶ms.uri.to_string()).await {
|
||||
memory != crate::executor::ProgramMemory::default()
|
||||
let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
|
||||
*memory != crate::executor::ProgramMemory::default()
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
let tokens_changed = if let Some(previous_tokens) = previous_tokens.clone() {
|
||||
previous_tokens != tokens
|
||||
// Get the previous tokens.
|
||||
let tokens_changed = if let Some(previous_tokens) = self.token_map.get(&filename) {
|
||||
*previous_tokens != tokens
|
||||
} else {
|
||||
true
|
||||
};
|
||||
@ -234,9 +225,9 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
|
||||
if tokens_changed {
|
||||
// Update our token map.
|
||||
self.token_map.insert(params.uri.to_string(), tokens.clone()).await;
|
||||
self.token_map.insert(params.uri.to_string(), tokens.clone());
|
||||
// Update our semantic tokens.
|
||||
self.update_semantic_tokens(tokens.clone(), ¶ms).await;
|
||||
self.update_semantic_tokens(&tokens, ¶ms).await;
|
||||
}
|
||||
|
||||
// Lets update the ast.
|
||||
@ -245,19 +236,19 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
let ast = match result {
|
||||
Ok(ast) => ast,
|
||||
Err(err) => {
|
||||
self.add_to_diagnostics(¶ms, err, true).await;
|
||||
self.ast_map.remove(¶ms.uri.to_string()).await;
|
||||
self.symbols_map.remove(¶ms.uri.to_string()).await;
|
||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||
self.add_to_diagnostics(¶ms, &[err], true).await;
|
||||
self.ast_map.remove(&filename);
|
||||
self.symbols_map.remove(&filename);
|
||||
self.memory_map.remove(&filename);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
// Check if the ast changed.
|
||||
let ast_changed = match self.ast_map.get(¶ms.uri.to_string()).await {
|
||||
let ast_changed = match self.ast_map.get(&filename) {
|
||||
Some(old_ast) => {
|
||||
// Check if the ast changed.
|
||||
old_ast != ast
|
||||
*old_ast != ast
|
||||
}
|
||||
None => true,
|
||||
};
|
||||
@ -268,17 +259,15 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
}
|
||||
|
||||
if ast_changed {
|
||||
self.ast_map.insert(params.uri.to_string(), ast.clone()).await;
|
||||
self.ast_map.insert(params.uri.to_string(), ast.clone());
|
||||
// Update the symbols map.
|
||||
self.symbols_map
|
||||
.insert(
|
||||
params.uri.to_string(),
|
||||
ast.get_lsp_symbols(¶ms.text).unwrap_or_default(),
|
||||
)
|
||||
.await;
|
||||
self.symbols_map.insert(
|
||||
params.uri.to_string(),
|
||||
ast.get_lsp_symbols(¶ms.text).unwrap_or_default(),
|
||||
);
|
||||
|
||||
// Update our semantic tokens.
|
||||
self.update_semantic_tokens(tokens, ¶ms).await;
|
||||
self.update_semantic_tokens(&tokens, ¶ms).await;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
@ -287,12 +276,7 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
// Clear the lints before we lint.
|
||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::INFORMATION))
|
||||
.await;
|
||||
for discovered_finding in &discovered_findings {
|
||||
self.add_to_diagnostics(¶ms, discovered_finding, false).await;
|
||||
}
|
||||
self.add_to_diagnostics(¶ms, &discovered_findings, false).await;
|
||||
}
|
||||
}
|
||||
|
||||
@ -308,7 +292,7 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
// Execute the code if we have an executor context.
|
||||
// This function automatically executes if we should & updates the diagnostics if we got
|
||||
// errors.
|
||||
if self.execute(¶ms, ast.clone()).await.is_err() {
|
||||
if self.execute(¶ms, &ast).await.is_err() {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -323,35 +307,22 @@ impl Backend {
|
||||
*self.can_execute.read().await
|
||||
}
|
||||
|
||||
async fn set_can_execute(&self, can_execute: bool) {
|
||||
*self.can_execute.write().await = can_execute;
|
||||
pub async fn executor_ctx(&self) -> tokio::sync::RwLockReadGuard<'_, Option<crate::executor::ExecutorContext>> {
|
||||
self.executor_ctx.read().await
|
||||
}
|
||||
|
||||
pub async fn executor_ctx(&self) -> Option<crate::executor::ExecutorContext> {
|
||||
self.executor_ctx.read().await.clone()
|
||||
}
|
||||
|
||||
async fn set_executor_ctx(&self, executor_ctx: crate::executor::ExecutorContext) {
|
||||
*self.executor_ctx.write().await = Some(executor_ctx);
|
||||
}
|
||||
|
||||
async fn update_semantic_tokens(&self, tokens: Vec<crate::token::Token>, params: &TextDocumentItem) {
|
||||
async fn update_semantic_tokens(&self, tokens: &[crate::token::Token], params: &TextDocumentItem) {
|
||||
// Update the semantic tokens map.
|
||||
let mut semantic_tokens = vec![];
|
||||
let mut last_position = Position::new(0, 0);
|
||||
for token in &tokens {
|
||||
let Ok(mut token_type) = SemanticTokenType::try_from(token.token_type) else {
|
||||
for token in tokens {
|
||||
let Ok(token_type) = SemanticTokenType::try_from(token.token_type) else {
|
||||
// We continue here because not all tokens can be converted this way, we will get
|
||||
// the rest from the ast.
|
||||
continue;
|
||||
};
|
||||
|
||||
if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
|
||||
// This is a stdlib function.
|
||||
token_type = SemanticTokenType::FUNCTION;
|
||||
}
|
||||
|
||||
let mut token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
|
||||
let mut token_type_index = match self.get_semantic_token_type_index(&token_type) {
|
||||
Some(index) => index,
|
||||
// This is actually bad this should not fail.
|
||||
// The test for listing all semantic token types should make this never happen.
|
||||
@ -366,21 +337,21 @@ impl Backend {
|
||||
}
|
||||
};
|
||||
|
||||
let source_range: SourceRange = token.clone().into();
|
||||
let source_range: SourceRange = token.into();
|
||||
let position = source_range.start_to_lsp_position(¶ms.text);
|
||||
|
||||
// Calculate the token modifiers.
|
||||
// Get the value at the current position.
|
||||
let token_modifiers_bitset: u32 = if let Some(ast) = self.ast_map.get(¶ms.uri.to_string()).await {
|
||||
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(params.uri.as_str()) {
|
||||
let token_index = Arc::new(Mutex::new(token_type_index));
|
||||
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
||||
crate::lint::walk(&ast, &|node: crate::lint::Node| {
|
||||
crate::walk::walk(&ast, &|node: crate::walk::Node| {
|
||||
let node_range: SourceRange = (&node).into();
|
||||
if !node_range.contains(source_range.start()) {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let get_modifier = |modifier: SemanticTokenModifier| -> Result<bool> {
|
||||
let get_modifier = |modifier: Vec<SemanticTokenModifier>| -> Result<bool> {
|
||||
let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
|
||||
return Ok(true);
|
||||
@ -394,61 +365,73 @@ impl Backend {
|
||||
};
|
||||
|
||||
match node {
|
||||
crate::lint::Node::TagDeclarator(_) => {
|
||||
return get_modifier(SemanticTokenModifier::DEFINITION);
|
||||
crate::walk::Node::TagDeclarator(_) => {
|
||||
return get_modifier(vec![
|
||||
SemanticTokenModifier::DEFINITION,
|
||||
SemanticTokenModifier::STATIC,
|
||||
]);
|
||||
}
|
||||
crate::lint::Node::VariableDeclarator(variable) => {
|
||||
let sr: SourceRange = variable.id.clone().into();
|
||||
crate::walk::Node::VariableDeclarator(variable) => {
|
||||
let sr: SourceRange = (&variable.id).into();
|
||||
if sr.contains(source_range.start()) {
|
||||
if let Value::FunctionExpression(_) = &variable.init {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
|
||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
}
|
||||
|
||||
return get_modifier(SemanticTokenModifier::DECLARATION);
|
||||
return get_modifier(vec![
|
||||
SemanticTokenModifier::DECLARATION,
|
||||
SemanticTokenModifier::READONLY,
|
||||
]);
|
||||
}
|
||||
}
|
||||
crate::lint::Node::Parameter(_) => {
|
||||
crate::walk::Node::Parameter(_) => {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PARAMETER) {
|
||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PARAMETER) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
crate::lint::Node::MemberExpression(member_expression) => {
|
||||
let sr: SourceRange = member_expression.property.clone().into();
|
||||
crate::walk::Node::MemberExpression(member_expression) => {
|
||||
let sr: SourceRange = (&member_expression.property).into();
|
||||
if sr.contains(source_range.start()) {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
|
||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
crate::lint::Node::ObjectProperty(object_property) => {
|
||||
let sr: SourceRange = object_property.key.clone().into();
|
||||
crate::walk::Node::ObjectProperty(object_property) => {
|
||||
let sr: SourceRange = (&object_property.key).into();
|
||||
if sr.contains(source_range.start()) {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
|
||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
}
|
||||
return get_modifier(SemanticTokenModifier::DECLARATION);
|
||||
return get_modifier(vec![SemanticTokenModifier::DECLARATION]);
|
||||
}
|
||||
crate::lint::Node::CallExpression(call_expr) => {
|
||||
let sr: SourceRange = call_expr.callee.clone().into();
|
||||
crate::walk::Node::CallExpression(call_expr) => {
|
||||
let sr: SourceRange = (&call_expr.callee).into();
|
||||
if sr.contains(source_range.start()) {
|
||||
let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
|
||||
*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
|
||||
*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
|
||||
Some(index) => index,
|
||||
None => token_type_index,
|
||||
};
|
||||
|
||||
if self.stdlib_completions.contains_key(&call_expr.callee.name) {
|
||||
// This is a stdlib function.
|
||||
return get_modifier(vec![SemanticTokenModifier::DEFAULT_LIBRARY]);
|
||||
}
|
||||
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
@ -509,15 +492,12 @@ impl Backend {
|
||||
|
||||
last_position = position;
|
||||
}
|
||||
self.semantic_tokens_map
|
||||
.insert(params.uri.to_string(), semantic_tokens)
|
||||
.await;
|
||||
self.semantic_tokens_map.insert(params.uri.to_string(), semantic_tokens);
|
||||
}
|
||||
|
||||
async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
|
||||
let mut items = match self.diagnostics_map.get(uri.as_str()).await {
|
||||
Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items.clone(),
|
||||
_ => vec![],
|
||||
let Some(mut items) = self.diagnostics_map.get_mut(uri.as_str()) else {
|
||||
return;
|
||||
};
|
||||
|
||||
// If we only want to clear a specific severity, do that.
|
||||
@ -527,94 +507,83 @@ impl Backend {
|
||||
items.clear();
|
||||
}
|
||||
|
||||
self.diagnostics_map
|
||||
.insert(
|
||||
uri.to_string(),
|
||||
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
|
||||
related_documents: None,
|
||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||
result_id: None,
|
||||
items: items.clone(),
|
||||
},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
if items.is_empty() {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.client.publish_diagnostics(uri.clone(), items, None).await;
|
||||
// We need to drop the items here.
|
||||
drop(items);
|
||||
|
||||
self.diagnostics_map.remove(uri.as_str());
|
||||
} else {
|
||||
// We don't need to update the map since we used get_mut.
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.client.publish_diagnostics(uri.clone(), items.clone(), None).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn add_to_diagnostics<DiagT: IntoDiagnostic + std::fmt::Debug>(
|
||||
&self,
|
||||
params: &TextDocumentItem,
|
||||
diagnostic: DiagT,
|
||||
diagnostics: &[DiagT],
|
||||
clear_all_before_add: bool,
|
||||
) {
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostic))
|
||||
.log_message(MessageType::INFO, format!("adding {:?} to diag", diagnostics))
|
||||
.await;
|
||||
|
||||
let diagnostic = diagnostic.to_lsp_diagnostic(¶ms.text);
|
||||
|
||||
if clear_all_before_add {
|
||||
self.clear_diagnostics_map(¶ms.uri, None).await;
|
||||
} else if diagnostic.severity == Some(DiagnosticSeverity::ERROR) {
|
||||
} else if diagnostics.iter().all(|x| x.severity() == DiagnosticSeverity::ERROR) {
|
||||
// If the diagnostic is an error, it will be the only error we get since that halts
|
||||
// execution.
|
||||
// Clear the diagnostics before we add a new one.
|
||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::ERROR))
|
||||
.await;
|
||||
} else if diagnostics
|
||||
.iter()
|
||||
.all(|x| x.severity() == DiagnosticSeverity::INFORMATION)
|
||||
{
|
||||
// If the diagnostic is a lint, we will pass them all to add at once so we need to
|
||||
// clear the old ones.
|
||||
self.clear_diagnostics_map(¶ms.uri, Some(DiagnosticSeverity::INFORMATION))
|
||||
.await;
|
||||
}
|
||||
|
||||
let DocumentDiagnosticReport::Full(mut report) = self
|
||||
.diagnostics_map
|
||||
.get(params.uri.clone().as_str())
|
||||
.await
|
||||
.unwrap_or(DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
|
||||
related_documents: None,
|
||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||
result_id: None,
|
||||
items: vec![],
|
||||
},
|
||||
}))
|
||||
else {
|
||||
unreachable!();
|
||||
let mut items = if let Some(items) = self.diagnostics_map.get(params.uri.as_str()) {
|
||||
// TODO: Would be awesome to fix the clone here.
|
||||
items.clone()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
// Ensure we don't already have this diagnostic.
|
||||
if report
|
||||
.full_document_diagnostic_report
|
||||
.items
|
||||
.iter()
|
||||
.any(|x| x == &diagnostic)
|
||||
{
|
||||
self.client
|
||||
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
||||
.await;
|
||||
return;
|
||||
for diagnostic in diagnostics {
|
||||
let d = diagnostic.to_lsp_diagnostic(¶ms.text);
|
||||
// Make sure we don't duplicate diagnostics.
|
||||
if !items.iter().any(|x| x == &d) {
|
||||
items.push(d);
|
||||
}
|
||||
}
|
||||
|
||||
report.full_document_diagnostic_report.items.push(diagnostic);
|
||||
self.diagnostics_map.insert(params.uri.to_string(), items.clone());
|
||||
|
||||
self.diagnostics_map
|
||||
.insert(params.uri.to_string(), DocumentDiagnosticReport::Full(report.clone()))
|
||||
.await;
|
||||
|
||||
self.client
|
||||
.publish_diagnostics(params.uri.clone(), report.full_document_diagnostic_report.items, None)
|
||||
.await;
|
||||
self.client.publish_diagnostics(params.uri.clone(), items, None).await;
|
||||
}
|
||||
|
||||
async fn execute(&self, params: &TextDocumentItem, ast: crate::ast::types::Program) -> Result<()> {
|
||||
async fn execute(&self, params: &TextDocumentItem, ast: &crate::ast::types::Program) -> Result<()> {
|
||||
// Check if we can execute.
|
||||
if !self.can_execute().await {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Execute the code if we have an executor context.
|
||||
let Some(executor_ctx) = self.executor_ctx().await else {
|
||||
let ctx = self.executor_ctx().await;
|
||||
let Some(ref executor_ctx) = *ctx else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
@ -629,17 +598,16 @@ impl Backend {
|
||||
let memory = match executor_ctx.run(ast, None).await {
|
||||
Ok(memory) => memory,
|
||||
Err(err) => {
|
||||
self.memory_map.remove(¶ms.uri.to_string()).await;
|
||||
self.add_to_diagnostics(params, err, false).await;
|
||||
self.memory_map.remove(params.uri.as_str());
|
||||
self.add_to_diagnostics(params, &[err], false).await;
|
||||
|
||||
// Since we already published the diagnostics we don't really care about the error
|
||||
// string.
|
||||
return Err(anyhow::anyhow!("failed to execute code"));
|
||||
}
|
||||
};
|
||||
drop(executor_ctx);
|
||||
|
||||
self.memory_map.insert(params.uri.to_string(), memory.clone()).await;
|
||||
self.memory_map.insert(params.uri.to_string(), memory.clone());
|
||||
|
||||
// Send the notification to the client that the memory was updated.
|
||||
self.client
|
||||
@ -649,22 +617,36 @@ impl Backend {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<u32> {
|
||||
pub fn get_semantic_token_type_index(&self, token_type: &SemanticTokenType) -> Option<u32> {
|
||||
SEMANTIC_TOKEN_TYPES
|
||||
.iter()
|
||||
.position(|x| *x == token_type)
|
||||
.position(|x| *x == *token_type)
|
||||
.map(|y| y as u32)
|
||||
}
|
||||
|
||||
pub fn get_semantic_token_modifier_index(&self, token_type: SemanticTokenModifier) -> Option<u32> {
|
||||
SEMANTIC_TOKEN_MODIFIERS
|
||||
.iter()
|
||||
.position(|x| *x == token_type)
|
||||
.map(|y| y as u32)
|
||||
pub fn get_semantic_token_modifier_index(&self, token_types: Vec<SemanticTokenModifier>) -> Option<u32> {
|
||||
if token_types.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut modifier = None;
|
||||
for token_type in token_types {
|
||||
if let Some(index) = SEMANTIC_TOKEN_MODIFIERS
|
||||
.iter()
|
||||
.position(|x| *x == token_type)
|
||||
.map(|y| y as u32)
|
||||
{
|
||||
modifier = match modifier {
|
||||
Some(modifier) => Some(modifier | index),
|
||||
None => Some(index),
|
||||
};
|
||||
}
|
||||
}
|
||||
modifier
|
||||
}
|
||||
|
||||
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
|
||||
let ast = match self.ast_map.get(file_name).await {
|
||||
let ast = match self.ast_map.get(file_name) {
|
||||
Some(ast) => ast,
|
||||
None => return vec![],
|
||||
};
|
||||
@ -681,7 +663,9 @@ impl Backend {
|
||||
// Collect all the file data we know.
|
||||
let mut buf = vec![];
|
||||
let mut zip = zip::ZipWriter::new(std::io::Cursor::new(&mut buf));
|
||||
for (entry, value) in self.code_map.inner().await.iter() {
|
||||
for code in self.code_map.iter() {
|
||||
let entry = code.key();
|
||||
let value = code.value();
|
||||
let file_name = entry.replace("file://", "").to_string();
|
||||
|
||||
let options = zip::write::SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
|
||||
@ -717,7 +701,7 @@ impl Backend {
|
||||
// Get the workspace folders.
|
||||
// The key of the workspace folder is the project name.
|
||||
let workspace_folders = self.workspace_folders().await;
|
||||
let project_names: Vec<String> = workspace_folders.iter().map(|v| v.name.clone()).collect::<Vec<_>>();
|
||||
let project_names: Vec<&str> = workspace_folders.iter().map(|v| v.name.as_str()).collect::<Vec<_>>();
|
||||
// Get the first name.
|
||||
let project_name = project_names
|
||||
.first()
|
||||
@ -764,7 +748,9 @@ impl Backend {
|
||||
let filename = params.text_document.uri.to_string();
|
||||
|
||||
{
|
||||
let Some(mut executor_ctx) = self.executor_ctx().await else {
|
||||
let mut ctx = self.executor_ctx.write().await;
|
||||
// Borrow the executor context mutably.
|
||||
let Some(ref mut executor_ctx) = *ctx else {
|
||||
self.client
|
||||
.log_message(MessageType::ERROR, "no executor context set to update units for")
|
||||
.await;
|
||||
@ -776,8 +762,8 @@ impl Backend {
|
||||
.await;
|
||||
|
||||
// Try to get the memory for the current code.
|
||||
let has_memory = if let Some(memory) = self.memory_map.get(&filename).await {
|
||||
memory != crate::executor::ProgramMemory::default()
|
||||
let has_memory = if let Some(memory) = self.memory_map.get(&filename) {
|
||||
*memory != crate::executor::ProgramMemory::default()
|
||||
} else {
|
||||
false
|
||||
};
|
||||
@ -792,10 +778,6 @@ impl Backend {
|
||||
|
||||
// Set the engine units.
|
||||
executor_ctx.update_units(params.units);
|
||||
|
||||
// Update the locked executor context.
|
||||
self.set_executor_ctx(executor_ctx.clone()).await;
|
||||
drop(executor_ctx);
|
||||
}
|
||||
// Lock is dropped here since nested.
|
||||
// This is IMPORTANT.
|
||||
@ -823,20 +805,13 @@ impl Backend {
|
||||
&self,
|
||||
params: custom_notifications::UpdateCanExecuteParams,
|
||||
) -> RpcResult<custom_notifications::UpdateCanExecuteResponse> {
|
||||
let can_execute = self.can_execute().await;
|
||||
let mut can_execute = self.can_execute.write().await;
|
||||
|
||||
if can_execute == params.can_execute {
|
||||
if *can_execute == params.can_execute {
|
||||
return Ok(custom_notifications::UpdateCanExecuteResponse {});
|
||||
}
|
||||
|
||||
if !params.can_execute {
|
||||
// Kill any in progress executions.
|
||||
if let Some(current_handle) = self.current_handle().await {
|
||||
current_handle.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
self.set_can_execute(params.can_execute).await;
|
||||
*can_execute = params.can_execute;
|
||||
|
||||
Ok(custom_notifications::UpdateCanExecuteResponse {})
|
||||
}
|
||||
@ -949,7 +924,7 @@ impl LanguageServer for Backend {
|
||||
}
|
||||
|
||||
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
||||
self.do_did_change(params.clone()).await;
|
||||
self.do_did_change(params).await;
|
||||
}
|
||||
|
||||
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
||||
@ -988,7 +963,7 @@ impl LanguageServer for Backend {
|
||||
async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
|
||||
let filename = params.text_document_position_params.text_document.uri.to_string();
|
||||
|
||||
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||
let Some(current_code) = self.code_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||
@ -998,7 +973,7 @@ impl LanguageServer for Backend {
|
||||
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
|
||||
|
||||
// Let's iterate over the AST and find the node that contains the cursor.
|
||||
let Some(ast) = self.ast_map.get(&filename).await else {
|
||||
let Some(ast) = self.ast_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
@ -1031,7 +1006,11 @@ impl LanguageServer for Backend {
|
||||
value: format!(
|
||||
"```{}{}```\n{}",
|
||||
name,
|
||||
label_details.detail.clone().unwrap_or_default(),
|
||||
if let Some(detail) = &label_details.detail {
|
||||
detail
|
||||
} else {
|
||||
""
|
||||
},
|
||||
docs
|
||||
),
|
||||
}),
|
||||
@ -1090,7 +1069,7 @@ impl LanguageServer for Backend {
|
||||
let filename = params.text_document.uri.to_string();
|
||||
|
||||
// Get the current diagnostics for this file.
|
||||
let Some(diagnostic) = self.diagnostics_map.get(&filename).await else {
|
||||
let Some(items) = self.diagnostics_map.get(&filename) else {
|
||||
// Send an empty report.
|
||||
return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
|
||||
RelatedFullDocumentDiagnosticReport {
|
||||
@ -1103,13 +1082,21 @@ impl LanguageServer for Backend {
|
||||
)));
|
||||
};
|
||||
|
||||
Ok(DocumentDiagnosticReportResult::Report(diagnostic.clone()))
|
||||
Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
|
||||
RelatedFullDocumentDiagnosticReport {
|
||||
related_documents: None,
|
||||
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||
result_id: None,
|
||||
items: items.clone(),
|
||||
},
|
||||
},
|
||||
)))
|
||||
}
|
||||
|
||||
async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
|
||||
let filename = params.text_document_position_params.text_document.uri.to_string();
|
||||
|
||||
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||
let Some(current_code) = self.code_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||
@ -1119,7 +1106,7 @@ impl LanguageServer for Backend {
|
||||
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
|
||||
|
||||
// Let's iterate over the AST and find the node that contains the cursor.
|
||||
let Some(ast) = self.ast_map.get(&filename).await else {
|
||||
let Some(ast) = self.ast_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
@ -1153,7 +1140,7 @@ impl LanguageServer for Backend {
|
||||
|
||||
signature.active_parameter = Some(parameter_index);
|
||||
|
||||
Ok(Some(signature.clone()))
|
||||
Ok(Some(signature))
|
||||
}
|
||||
crate::ast::types::Hover::Comment { value: _, range: _ } => {
|
||||
return Ok(None);
|
||||
@ -1170,7 +1157,7 @@ impl LanguageServer for Backend {
|
||||
async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
|
||||
let filename = params.text_document.uri.to_string();
|
||||
|
||||
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename).await else {
|
||||
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
@ -1183,7 +1170,7 @@ impl LanguageServer for Backend {
|
||||
async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
|
||||
let filename = params.text_document.uri.to_string();
|
||||
|
||||
let Some(symbols) = self.symbols_map.get(&filename).await else {
|
||||
let Some(symbols) = self.symbols_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
@ -1193,7 +1180,7 @@ impl LanguageServer for Backend {
|
||||
async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
|
||||
let filename = params.text_document.uri.to_string();
|
||||
|
||||
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||
let Some(current_code) = self.code_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||
@ -1230,7 +1217,7 @@ impl LanguageServer for Backend {
|
||||
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
|
||||
let filename = params.text_document_position.text_document.uri.to_string();
|
||||
|
||||
let Some(current_code) = self.code_map.get(&filename).await else {
|
||||
let Some(current_code) = self.code_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Ok(current_code) = std::str::from_utf8(¤t_code) else {
|
||||
@ -1273,7 +1260,7 @@ impl LanguageServer for Backend {
|
||||
let filename = params.text_document.uri.to_string();
|
||||
|
||||
// Get the ast.
|
||||
let Some(ast) = self.ast_map.get(&filename).await else {
|
||||
let Some(ast) = self.ast_map.get(&filename) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
|
@ -3,7 +3,8 @@
|
||||
pub mod backend;
|
||||
pub mod copilot;
|
||||
pub mod kcl;
|
||||
mod safemap;
|
||||
#[cfg(any(test, feature = "lsp-test-util"))]
|
||||
pub mod test_util;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
pub mod util;
|
||||
|
@ -1,60 +0,0 @@
|
||||
//! A map type that is safe to use in a concurrent environment.
|
||||
//! But also in wasm.
|
||||
//! Previously, we used `dashmap::DashMap` for this purpose, but it doesn't work in wasm.
|
||||
|
||||
use std::{borrow::Borrow, collections::HashMap, hash::Hash, sync::Arc};
|
||||
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
/// A thread-safe map type.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct SafeMap<K: Eq + Hash + Clone, V: Clone>(Arc<RwLock<HashMap<K, V>>>);
|
||||
|
||||
impl<K: Eq + Hash + Clone, V: Clone> SafeMap<K, V> {
|
||||
/// Create a new empty map.
|
||||
pub fn new() -> Self {
|
||||
SafeMap(Arc::new(RwLock::new(HashMap::new())))
|
||||
}
|
||||
|
||||
pub async fn len(&self) -> usize {
|
||||
self.0.read().await.len()
|
||||
}
|
||||
|
||||
pub async fn is_empty(&self) -> bool {
|
||||
self.0.read().await.is_empty()
|
||||
}
|
||||
|
||||
pub async fn clear(&self) {
|
||||
self.0.write().await.clear();
|
||||
}
|
||||
|
||||
/// Insert a key-value pair into the map.
|
||||
pub async fn insert(&self, key: K, value: V) {
|
||||
self.0.write().await.insert(key, value);
|
||||
}
|
||||
|
||||
/// Get a reference to the value associated with the given key.
|
||||
pub async fn get<Q>(&self, key: &Q) -> Option<V>
|
||||
where
|
||||
K: Borrow<Q>,
|
||||
Q: Hash + Eq + ?Sized,
|
||||
{
|
||||
self.0.read().await.get(key).cloned()
|
||||
}
|
||||
|
||||
/// Remove the key-value pair associated with the given key.
|
||||
pub async fn remove(&self, key: &K) -> Option<V> {
|
||||
self.0.write().await.remove(key)
|
||||
}
|
||||
|
||||
/// Get a reference to the underlying map.
|
||||
pub async fn inner(&self) -> HashMap<K, V> {
|
||||
self.0.read().await.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K: Eq + Hash + Clone, V: Clone> Default for SafeMap<K, V> {
|
||||
fn default() -> Self {
|
||||
SafeMap::new()
|
||||
}
|
||||
}
|
112
src/wasm-lib/kcl/src/lsp/test_util.rs
Normal file
112
src/wasm-lib/kcl/src/lsp/test_util.rs
Normal file
@ -0,0 +1,112 @@
|
||||
use std::sync::{Arc, RwLock};
|
||||
|
||||
use anyhow::Result;
|
||||
use tower_lsp::LanguageServer;
|
||||
|
||||
fn new_zoo_client() -> kittycad::Client {
|
||||
let user_agent = concat!(env!("CARGO_PKG_NAME"), ".rs/", env!("CARGO_PKG_VERSION"),);
|
||||
let http_client = reqwest::Client::builder()
|
||||
.user_agent(user_agent)
|
||||
// For file conversions we need this to be long.
|
||||
.timeout(std::time::Duration::from_secs(600))
|
||||
.connect_timeout(std::time::Duration::from_secs(60));
|
||||
let ws_client = reqwest::Client::builder()
|
||||
.user_agent(user_agent)
|
||||
// For file conversions we need this to be long.
|
||||
.timeout(std::time::Duration::from_secs(600))
|
||||
.connect_timeout(std::time::Duration::from_secs(60))
|
||||
.connection_verbose(true)
|
||||
.tcp_keepalive(std::time::Duration::from_secs(600))
|
||||
.http1_only();
|
||||
|
||||
let token = std::env::var("KITTYCAD_API_TOKEN").expect("KITTYCAD_API_TOKEN not set");
|
||||
|
||||
// Create the client.
|
||||
let mut client = kittycad::Client::new_from_reqwest(token, http_client, ws_client);
|
||||
// Set a local engine address if it's set.
|
||||
if let Ok(addr) = std::env::var("LOCAL_ENGINE_ADDR") {
|
||||
client.set_base_url(addr);
|
||||
}
|
||||
|
||||
client
|
||||
}
|
||||
|
||||
// Create a fake kcl lsp server for testing.
|
||||
pub async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
|
||||
let stdlib = crate::std::StdLib::new();
|
||||
let stdlib_completions = crate::lsp::kcl::get_completions_from_stdlib(&stdlib)?;
|
||||
let stdlib_signatures = crate::lsp::kcl::get_signatures_from_stdlib(&stdlib)?;
|
||||
|
||||
let zoo_client = new_zoo_client();
|
||||
|
||||
let executor_ctx = if execute {
|
||||
Some(crate::executor::ExecutorContext::new(&zoo_client, Default::default()).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let can_execute = executor_ctx.is_some();
|
||||
|
||||
// Create the backend.
|
||||
let (service, _) = tower_lsp::LspService::build(|client| crate::lsp::kcl::Backend {
|
||||
client,
|
||||
fs: Arc::new(crate::fs::FileManager::new()),
|
||||
workspace_folders: Default::default(),
|
||||
stdlib_completions,
|
||||
stdlib_signatures,
|
||||
token_map: Default::default(),
|
||||
ast_map: Default::default(),
|
||||
memory_map: Default::default(),
|
||||
code_map: Default::default(),
|
||||
diagnostics_map: Default::default(),
|
||||
symbols_map: Default::default(),
|
||||
semantic_tokens_map: Default::default(),
|
||||
zoo_client,
|
||||
can_send_telemetry: true,
|
||||
executor_ctx: Arc::new(tokio::sync::RwLock::new(executor_ctx)),
|
||||
can_execute: Arc::new(tokio::sync::RwLock::new(can_execute)),
|
||||
is_initialized: Default::default(),
|
||||
})
|
||||
.custom_method("kcl/updateUnits", crate::lsp::kcl::Backend::update_units)
|
||||
.custom_method("kcl/updateCanExecute", crate::lsp::kcl::Backend::update_can_execute)
|
||||
.finish();
|
||||
|
||||
let server = service.inner();
|
||||
|
||||
server
|
||||
.initialize(tower_lsp::lsp_types::InitializeParams::default())
|
||||
.await?;
|
||||
|
||||
server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
|
||||
|
||||
Ok(server.clone())
|
||||
}
|
||||
|
||||
// Create a fake copilot lsp server for testing.
|
||||
pub async fn copilot_lsp_server() -> Result<crate::lsp::copilot::Backend> {
|
||||
// We don't actually need to authenticate to the backend for this test.
|
||||
let zoo_client = kittycad::Client::new_from_env();
|
||||
|
||||
// Create the backend.
|
||||
let (service, _) = tower_lsp::LspService::new(|client| crate::lsp::copilot::Backend {
|
||||
client,
|
||||
fs: Arc::new(crate::fs::FileManager::new()),
|
||||
workspace_folders: Default::default(),
|
||||
code_map: Default::default(),
|
||||
zoo_client,
|
||||
editor_info: Arc::new(RwLock::new(crate::lsp::copilot::types::CopilotEditorInfo::default())),
|
||||
cache: Arc::new(crate::lsp::copilot::cache::CopilotCache::new()),
|
||||
telemetry: Default::default(),
|
||||
is_initialized: Default::default(),
|
||||
diagnostics_map: Default::default(),
|
||||
});
|
||||
let server = service.inner();
|
||||
|
||||
server
|
||||
.initialize(tower_lsp::lsp_types::InitializeParams::default())
|
||||
.await?;
|
||||
|
||||
server.initialized(tower_lsp::lsp_types::InitializedParams {}).await;
|
||||
|
||||
Ok(server.clone())
|
||||
}
|
File diff suppressed because it is too large
Load Diff
@ -37,4 +37,7 @@ pub fn get_line_before(pos: Position, rope: &Rope) -> Option<String> {
|
||||
pub trait IntoDiagnostic {
|
||||
/// Convert the traited object to a [lsp_types::Diagnostic].
|
||||
fn to_lsp_diagnostic(&self, text: &str) -> Diagnostic;
|
||||
|
||||
/// Get the severity of the diagnostic.
|
||||
fn severity(&self) -> tower_lsp::lsp_types::DiagnosticSeverity;
|
||||
}
|
||||
|
@ -474,11 +474,7 @@ fn integer_range(i: TokenSlice) -> PResult<Vec<Value>> {
|
||||
}
|
||||
|
||||
fn object_property(i: TokenSlice) -> PResult<ObjectProperty> {
|
||||
let key = identifier
|
||||
.context(expected(
|
||||
"the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key",
|
||||
))
|
||||
.parse_next(i)?;
|
||||
let key = identifier.context(expected("the property's key (the name or identifier of the property), e.g. in 'height: 4', 'height' is the property key")).parse_next(i)?;
|
||||
colon
|
||||
.context(expected(
|
||||
"a colon, which separates the property's key from the value you're setting it to, e.g. 'height: 4'",
|
||||
@ -588,12 +584,9 @@ fn member_expression_subscript(i: TokenSlice) -> PResult<(LiteralIdentifier, usi
|
||||
fn member_expression(i: TokenSlice) -> PResult<MemberExpression> {
|
||||
// This is an identifier, followed by a sequence of members (aka properties)
|
||||
// First, the identifier.
|
||||
let id = identifier
|
||||
.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier"))
|
||||
.parse_next(i)?;
|
||||
let id = identifier.context(expected("the identifier of the object whose property you're trying to access, e.g. in 'shape.size.width', 'shape' is the identifier")).parse_next(i)?;
|
||||
// Now a sequence of members.
|
||||
let member = alt((member_expression_dot, member_expression_subscript))
|
||||
.context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
|
||||
let member = alt((member_expression_dot, member_expression_subscript)).context(expected("a member/property, e.g. size.x and size['height'] and size[0] are all different ways to access a member/property of 'size'"));
|
||||
let mut members: Vec<_> = repeat(1.., member)
|
||||
.context(expected("a sequence of at least one members/properties"))
|
||||
.parse_next(i)?;
|
||||
@ -1111,19 +1104,9 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
|
||||
// TODO: negation. Original parser doesn't support `not` yet.
|
||||
TokenType::Operator => Err(KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: token.as_source_ranges(),
|
||||
message: format!(
|
||||
"{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)",
|
||||
token.value.as_str(),
|
||||
),
|
||||
})),
|
||||
other => Err(KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: token.as_source_ranges(),
|
||||
message: format!(
|
||||
"{EXPECTED} but found {} which is {}",
|
||||
token.value.as_str(),
|
||||
other,
|
||||
),
|
||||
message: format!("{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)", token.value.as_str(),),
|
||||
})),
|
||||
other => Err(KclError::Syntax(KclErrorDetails { source_ranges: token.as_source_ranges(), message: format!("{EXPECTED} but found {} which is {}", token.value.as_str(), other,) })),
|
||||
})
|
||||
.context(expected("a unary expression, e.g. -x or -3"))
|
||||
.parse_next(i)?;
|
||||
@ -1691,7 +1674,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
start0.value,
|
||||
NonCodeValue::BlockComment {
|
||||
value: "comment at start".to_owned(),
|
||||
style: CommentStyle::Block,
|
||||
style: CommentStyle::Block
|
||||
}
|
||||
);
|
||||
assert_eq!(start1.value, NonCodeValue::NewLine);
|
||||
@ -1756,8 +1739,8 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
start: 32,
|
||||
end: 33,
|
||||
value: 2u32.into(),
|
||||
raw: "2".to_owned(),
|
||||
})),
|
||||
raw: "2".to_owned()
|
||||
}))
|
||||
})],
|
||||
non_code_meta: NonCodeMeta {
|
||||
non_code_nodes: Default::default(),
|
||||
@ -1765,7 +1748,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
start: 7,
|
||||
end: 25,
|
||||
value: NonCodeValue::NewLine
|
||||
}],
|
||||
}]
|
||||
},
|
||||
},
|
||||
return_type: None,
|
||||
@ -1790,7 +1773,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
non_code_meta.non_code_nodes.get(&2).unwrap()[0].value,
|
||||
NonCodeValue::InlineComment {
|
||||
value: "inline-comment".to_owned(),
|
||||
style: CommentStyle::Line,
|
||||
style: CommentStyle::Line
|
||||
}
|
||||
);
|
||||
assert_eq!(body.len(), 4);
|
||||
@ -1815,8 +1798,8 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
end: 20,
|
||||
value: NonCodeValue::BlockComment {
|
||||
value: "this is a comment".to_owned(),
|
||||
style: CommentStyle::Line,
|
||||
},
|
||||
style: CommentStyle::Line
|
||||
}
|
||||
}],
|
||||
non_code_meta.start,
|
||||
);
|
||||
@ -1827,13 +1810,13 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
end: 82,
|
||||
value: NonCodeValue::InlineComment {
|
||||
value: "block\n comment".to_owned(),
|
||||
style: CommentStyle::Block,
|
||||
},
|
||||
style: CommentStyle::Block
|
||||
}
|
||||
},
|
||||
NonCodeNode {
|
||||
start: 82,
|
||||
end: 86,
|
||||
value: NonCodeValue::NewLine,
|
||||
value: NonCodeValue::NewLine
|
||||
},
|
||||
]),
|
||||
non_code_meta.non_code_nodes.get(&0),
|
||||
@ -1844,8 +1827,8 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
end: 129,
|
||||
value: NonCodeValue::BlockComment {
|
||||
value: "this is also a comment".to_owned(),
|
||||
style: CommentStyle::Line,
|
||||
},
|
||||
style: CommentStyle::Line
|
||||
}
|
||||
}]),
|
||||
non_code_meta.non_code_nodes.get(&1),
|
||||
);
|
||||
@ -1864,7 +1847,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
actual.non_code_meta.non_code_nodes.get(&0).unwrap()[0].value,
|
||||
NonCodeValue::InlineComment {
|
||||
value: "block\n comment".to_owned(),
|
||||
style: CommentStyle::Block,
|
||||
style: CommentStyle::Block
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -1912,7 +1895,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|
||||
start: 9,
|
||||
end: 10,
|
||||
value: 3u32.into(),
|
||||
raw: "3".to_owned(),
|
||||
raw: "3".to_owned()
|
||||
}))
|
||||
);
|
||||
}
|
||||
|
@ -567,7 +567,7 @@ mod tests {
|
||||
project_name: Some("assembly".to_string()),
|
||||
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects/assembly".to_string(),
|
||||
current_file_name: None,
|
||||
current_file_path: None,
|
||||
current_file_path: None
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -586,7 +586,7 @@ mod tests {
|
||||
project_name: None,
|
||||
project_path: "/Users/macinatormax/Documents/kittycad-modeling-projects".to_string(),
|
||||
current_file_name: None,
|
||||
current_file_path: None,
|
||||
current_file_path: None
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -624,7 +624,7 @@ mod tests {
|
||||
project_name: Some("modeling-app".to_string()),
|
||||
project_path: "/Users/macinatormax/kittycad/modeling-app".to_string(),
|
||||
current_file_name: None,
|
||||
current_file_path: None,
|
||||
current_file_path: None
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -642,7 +642,7 @@ mod tests {
|
||||
project_name: Some("browser".to_string()),
|
||||
project_path: "/browser".to_string(),
|
||||
current_file_name: Some("main.kcl".to_string()),
|
||||
current_file_path: Some("/browser/main.kcl".to_string()),
|
||||
current_file_path: Some("/browser/main.kcl".to_string())
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -660,7 +660,7 @@ mod tests {
|
||||
project_name: Some("browser".to_string()),
|
||||
project_path: "/browser".to_string(),
|
||||
current_file_name: None,
|
||||
current_file_path: None,
|
||||
current_file_path: None
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -1046,13 +1046,7 @@ const model = import("model.obj")"#
|
||||
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.toml")).await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
format!(
|
||||
"File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
|
||||
tmp_project_dir.join("settings.toml").display()
|
||||
)
|
||||
);
|
||||
assert_eq!(result.unwrap_err().to_string(), format!("File type (toml) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.toml").display()));
|
||||
|
||||
std::fs::remove_dir_all(tmp_project_dir).unwrap();
|
||||
}
|
||||
@ -1067,13 +1061,7 @@ const model = import("model.obj")"#
|
||||
let result = super::ProjectState::new_from_path(tmp_project_dir.join("settings.docx")).await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
format!(
|
||||
"File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl",
|
||||
tmp_project_dir.join("settings.docx").display()
|
||||
)
|
||||
);
|
||||
assert_eq!(result.unwrap_err().to_string(), format!("File type (docx) cannot be opened with this app: `{}`, try opening one of the following file types: stp, glb, fbxb, fbx, gltf, obj, ply, sldprt, step, stl, kcl", tmp_project_dir.join("settings.docx").display()));
|
||||
|
||||
std::fs::remove_dir_all(tmp_project_dir).unwrap();
|
||||
}
|
||||
|
@ -640,7 +640,7 @@ textWrapping = true
|
||||
app: AppSettings {
|
||||
appearance: AppearanceSettings {
|
||||
theme: AppTheme::Dark,
|
||||
color: Default::default(),
|
||||
color: Default::default()
|
||||
},
|
||||
onboarding_status: OnboardingStatus::Dismissed,
|
||||
project_directory: None,
|
||||
@ -654,15 +654,15 @@ textWrapping = true
|
||||
mouse_controls: Default::default(),
|
||||
highlight_edges: Default::default(),
|
||||
show_debug_panel: true,
|
||||
enable_ssao: false.into(),
|
||||
enable_ssao: false.into()
|
||||
},
|
||||
text_editor: TextEditorSettings {
|
||||
text_wrapping: true.into(),
|
||||
blinking_cursor: true.into(),
|
||||
blinking_cursor: true.into()
|
||||
},
|
||||
project: Default::default(),
|
||||
command_bar: CommandBarSettings {
|
||||
include_settings: true.into(),
|
||||
include_settings: true.into()
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -698,7 +698,7 @@ includeSettings = false
|
||||
app: AppSettings {
|
||||
appearance: AppearanceSettings {
|
||||
theme: AppTheme::Dark,
|
||||
color: 138.0.into(),
|
||||
color: 138.0.into()
|
||||
},
|
||||
onboarding_status: Default::default(),
|
||||
project_directory: None,
|
||||
@ -712,15 +712,15 @@ includeSettings = false
|
||||
mouse_controls: Default::default(),
|
||||
highlight_edges: Default::default(),
|
||||
show_debug_panel: true,
|
||||
enable_ssao: true.into(),
|
||||
enable_ssao: true.into()
|
||||
},
|
||||
text_editor: TextEditorSettings {
|
||||
text_wrapping: false.into(),
|
||||
blinking_cursor: false.into(),
|
||||
blinking_cursor: false.into()
|
||||
},
|
||||
project: Default::default(),
|
||||
command_bar: CommandBarSettings {
|
||||
include_settings: false.into(),
|
||||
include_settings: false.into()
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -761,7 +761,7 @@ defaultProjectName = "projects-$nnn"
|
||||
app: AppSettings {
|
||||
appearance: AppearanceSettings {
|
||||
theme: AppTheme::Dark,
|
||||
color: 138.0.into(),
|
||||
color: 138.0.into()
|
||||
},
|
||||
onboarding_status: OnboardingStatus::Dismissed,
|
||||
project_directory: None,
|
||||
@ -775,18 +775,18 @@ defaultProjectName = "projects-$nnn"
|
||||
mouse_controls: Default::default(),
|
||||
highlight_edges: Default::default(),
|
||||
show_debug_panel: true,
|
||||
enable_ssao: true.into(),
|
||||
enable_ssao: true.into()
|
||||
},
|
||||
text_editor: TextEditorSettings {
|
||||
text_wrapping: false.into(),
|
||||
blinking_cursor: false.into(),
|
||||
blinking_cursor: false.into()
|
||||
},
|
||||
project: ProjectSettings {
|
||||
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
|
||||
default_project_name: "projects-$nnn".to_string().into(),
|
||||
default_project_name: "projects-$nnn".to_string().into()
|
||||
},
|
||||
command_bar: CommandBarSettings {
|
||||
include_settings: false.into(),
|
||||
include_settings: false.into()
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -836,7 +836,7 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
|
||||
app: AppSettings {
|
||||
appearance: AppearanceSettings {
|
||||
theme: AppTheme::System,
|
||||
color: Default::default(),
|
||||
color: Default::default()
|
||||
},
|
||||
onboarding_status: OnboardingStatus::Dismissed,
|
||||
project_directory: None,
|
||||
@ -850,15 +850,15 @@ projectDirectory = "/Users/macinatormax/Documents/kittycad-modeling-projects""#;
|
||||
mouse_controls: Default::default(),
|
||||
highlight_edges: true.into(),
|
||||
show_debug_panel: false,
|
||||
enable_ssao: true.into(),
|
||||
enable_ssao: true.into()
|
||||
},
|
||||
text_editor: TextEditorSettings {
|
||||
text_wrapping: true.into(),
|
||||
blinking_cursor: true.into(),
|
||||
blinking_cursor: true.into()
|
||||
},
|
||||
project: ProjectSettings {
|
||||
directory: "/Users/macinatormax/Documents/kittycad-modeling-projects".into(),
|
||||
default_project_name: "project-$nnn".to_string().into(),
|
||||
default_project_name: "project-$nnn".to_string().into()
|
||||
},
|
||||
command_bar: CommandBarSettings {
|
||||
include_settings: true.into()
|
||||
|
@ -115,7 +115,7 @@ includeSettings = false
|
||||
app: AppSettings {
|
||||
appearance: AppearanceSettings {
|
||||
theme: AppTheme::Dark,
|
||||
color: 138.0.into(),
|
||||
color: 138.0.into()
|
||||
},
|
||||
onboarding_status: Default::default(),
|
||||
project_directory: None,
|
||||
@ -129,14 +129,14 @@ includeSettings = false
|
||||
mouse_controls: Default::default(),
|
||||
highlight_edges: Default::default(),
|
||||
show_debug_panel: true,
|
||||
enable_ssao: true.into(),
|
||||
enable_ssao: true.into()
|
||||
},
|
||||
text_editor: TextEditorSettings {
|
||||
text_wrapping: false.into(),
|
||||
blinking_cursor: false.into(),
|
||||
blinking_cursor: false.into()
|
||||
},
|
||||
command_bar: CommandBarSettings {
|
||||
include_settings: false.into(),
|
||||
include_settings: false.into()
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@ -85,9 +85,9 @@ async fn inner_chamfer(
|
||||
// error to the user that they can only tag one edge at a time.
|
||||
if tag.is_some() && data.tags.len() > 1 {
|
||||
return Err(KclError::Type(KclErrorDetails {
|
||||
message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
|
||||
source_ranges: vec![args.source_range],
|
||||
}));
|
||||
message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag.".to_string(),
|
||||
source_ranges: vec![args.source_range],
|
||||
}));
|
||||
}
|
||||
|
||||
let mut fillet_or_chamfers = Vec::new();
|
||||
|
@ -314,10 +314,7 @@ fn get_import_format_from_extension(ext: &str) -> Result<kittycad::types::InputF
|
||||
} else if ext == "glb" {
|
||||
kittycad::types::FileImportFormat::Gltf
|
||||
} else {
|
||||
anyhow::bail!(
|
||||
"unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.",
|
||||
ext
|
||||
)
|
||||
anyhow::bail!("unknown source format for file extension: {}. Try setting the `--src-format` flag explicitly or use a valid format.", ext)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -28,7 +28,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
ast::types::{parse_json_number_as_f64, TagDeclarator},
|
||||
ast::types::{parse_json_number_as_f64, FunctionExpression, TagDeclarator},
|
||||
docs::StdLibFn,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
executor::{
|
||||
@ -85,6 +85,7 @@ lazy_static! {
|
||||
Box::new(crate::std::patterns::PatternLinear3D),
|
||||
Box::new(crate::std::patterns::PatternCircular2D),
|
||||
Box::new(crate::std::patterns::PatternCircular3D),
|
||||
Box::new(crate::std::patterns::PatternTransform),
|
||||
Box::new(crate::std::chamfer::Chamfer),
|
||||
Box::new(crate::std::fillet::Fillet),
|
||||
Box::new(crate::std::fillet::GetOppositeEdge),
|
||||
@ -351,6 +352,39 @@ impl Args {
|
||||
Ok(numbers)
|
||||
}
|
||||
|
||||
fn get_pattern_transform_args(&self) -> Result<(u32, FnAsArg<'_>, ExtrudeGroupSet), KclError> {
|
||||
let sr = vec![self.source_range];
|
||||
let mut args = self.args.iter();
|
||||
let num_repetitions = args.next().ok_or_else(|| {
|
||||
KclError::Type(KclErrorDetails {
|
||||
message: "Missing first argument (should be the number of repetitions)".to_owned(),
|
||||
source_ranges: sr.clone(),
|
||||
})
|
||||
})?;
|
||||
let num_repetitions = num_repetitions.get_u32(sr.clone())?;
|
||||
let transform = args.next().ok_or_else(|| {
|
||||
KclError::Type(KclErrorDetails {
|
||||
message: "Missing second argument (should be the transform function)".to_owned(),
|
||||
source_ranges: sr.clone(),
|
||||
})
|
||||
})?;
|
||||
let func = transform.get_function(sr.clone())?;
|
||||
let eg = args.next().ok_or_else(|| {
|
||||
KclError::Type(KclErrorDetails {
|
||||
message: "Missing third argument (should be a Sketch/ExtrudeGroup or an array of Sketch/ExtrudeGroups)"
|
||||
.to_owned(),
|
||||
source_ranges: sr.clone(),
|
||||
})
|
||||
})?;
|
||||
let eg = eg.get_extrude_group_set().map_err(|_e| {
|
||||
KclError::Type(KclErrorDetails {
|
||||
message: "Third argument was not an ExtrudeGroup".to_owned(),
|
||||
source_ranges: sr.clone(),
|
||||
})
|
||||
})?;
|
||||
Ok((num_repetitions, func, eg))
|
||||
}
|
||||
|
||||
fn get_hypotenuse_leg(&self) -> Result<(f64, f64), KclError> {
|
||||
let numbers = self.get_number_array()?;
|
||||
|
||||
@ -1242,6 +1276,11 @@ pub enum Primitive {
|
||||
Uuid,
|
||||
}
|
||||
|
||||
pub struct FnAsArg<'a> {
|
||||
pub func: &'a crate::executor::MemoryFunction,
|
||||
pub expr: Box<FunctionExpression>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use base64::Engine;
|
||||
|
@ -8,7 +8,11 @@ use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
executor::{ExtrudeGroup, ExtrudeGroupSet, Geometries, Geometry, MemoryItem, SketchGroup, SketchGroupSet},
|
||||
executor::{
|
||||
ExtrudeGroup, ExtrudeGroupSet, Geometries, Geometry, MemoryItem, Point3d, ProgramReturn, SketchGroup,
|
||||
SketchGroupSet, SourceRange, UserVal,
|
||||
},
|
||||
function_param::FunctionParam,
|
||||
std::{types::Uint, Args},
|
||||
};
|
||||
|
||||
@ -70,6 +74,233 @@ impl LinearPattern {
|
||||
}
|
||||
}
|
||||
|
||||
/// A linear pattern
|
||||
/// Each element in the pattern repeats a particular piece of geometry.
|
||||
/// The repetitions can be transformed by the `transform` parameter.
|
||||
pub async fn pattern_transform(args: Args) -> Result<MemoryItem, KclError> {
|
||||
let (num_repetitions, transform, extr) = args.get_pattern_transform_args()?;
|
||||
|
||||
let extrude_groups = inner_pattern_transform(
|
||||
num_repetitions,
|
||||
FunctionParam {
|
||||
inner: transform.func,
|
||||
fn_expr: transform.expr,
|
||||
meta: vec![args.source_range.into()],
|
||||
ctx: args.ctx.clone(),
|
||||
memory: args.current_program_memory.clone(),
|
||||
},
|
||||
extr,
|
||||
&args,
|
||||
)
|
||||
.await?;
|
||||
Ok(MemoryItem::ExtrudeGroups { value: extrude_groups })
|
||||
}
|
||||
|
||||
/// A linear pattern on a 3D solid.
|
||||
/// Each repetition of the pattern can be transformed (e.g. scaled, translated, hidden, etc).
|
||||
///
|
||||
/// ```no_run
|
||||
/// // Parameters
|
||||
/// const r = 50 // base radius
|
||||
/// const h = 10 // layer height
|
||||
/// const t = 0.005 // taper factor [0-1)
|
||||
/// // Defines how to modify each layer of the vase.
|
||||
/// // Each replica is shifted up the Z axis, and has a smoothly-varying radius
|
||||
/// fn transform = (replicaId) => {
|
||||
/// let scale = r * abs(1 - (t * replicaId)) * (5 + cos(replicaId / 8))
|
||||
/// return {
|
||||
/// translate: [0, 0, replicaId * 10],
|
||||
/// scale: [scale, scale, 0],
|
||||
/// }
|
||||
/// }
|
||||
/// // Each layer is just a pretty thin cylinder.
|
||||
/// fn layer = () => {
|
||||
/// return startSketchOn("XY") // or some other plane idk
|
||||
/// |> circle([0, 0], 1, %, 'tag1')
|
||||
/// |> extrude(h, %)
|
||||
/// }
|
||||
/// // The vase is 100 layers tall.
|
||||
/// // The 100 layers are replica of each other, with a slight transformation applied to each.
|
||||
/// let vase = layer() |> patternTransform(100, transform, %)
|
||||
/// ```
|
||||
#[stdlib {
|
||||
name = "patternTransform",
|
||||
}]
|
||||
async fn inner_pattern_transform<'a>(
|
||||
num_repetitions: u32,
|
||||
transform_function: FunctionParam<'a>,
|
||||
extrude_group_set: ExtrudeGroupSet,
|
||||
args: &'a Args,
|
||||
) -> Result<Vec<Box<ExtrudeGroup>>, KclError> {
|
||||
// Build the vec of transforms, one for each repetition.
|
||||
let mut transform = Vec::new();
|
||||
for i in 0..num_repetitions {
|
||||
let t = make_transform(i, &transform_function, args.source_range).await?;
|
||||
transform.push(t);
|
||||
}
|
||||
// Flush the batch for our fillets/chamfers if there are any.
|
||||
// If we do not flush these, then you won't be able to pattern something with fillets.
|
||||
// Flush just the fillets/chamfers that apply to these extrude groups.
|
||||
args.flush_batch_for_extrude_group_set(extrude_group_set.clone().into())
|
||||
.await?;
|
||||
|
||||
let starting_extrude_groups: Vec<Box<ExtrudeGroup>> = extrude_group_set.into();
|
||||
|
||||
if args.ctx.is_mock {
|
||||
return Ok(starting_extrude_groups);
|
||||
}
|
||||
|
||||
let mut extrude_groups = Vec::new();
|
||||
for e in starting_extrude_groups {
|
||||
let new_extrude_groups = send_pattern_transform(transform.clone(), &e, args).await?;
|
||||
extrude_groups.extend(new_extrude_groups);
|
||||
}
|
||||
Ok(extrude_groups)
|
||||
}
|
||||
|
||||
async fn send_pattern_transform(
|
||||
// This should be passed via reference, see
|
||||
// https://github.com/KittyCAD/modeling-app/issues/2821
|
||||
transform: Vec<kittycad::types::LinearTransform>,
|
||||
extrude_group: &ExtrudeGroup,
|
||||
args: &Args,
|
||||
) -> Result<Vec<Box<ExtrudeGroup>>, KclError> {
|
||||
let id = uuid::Uuid::new_v4();
|
||||
|
||||
let resp = args
|
||||
.send_modeling_cmd(
|
||||
id,
|
||||
ModelingCmd::EntityLinearPatternTransform {
|
||||
entity_id: extrude_group.id,
|
||||
transform,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let kittycad::types::OkWebSocketResponseData::Modeling {
|
||||
modeling_response: kittycad::types::OkModelingCmdResponse::EntityLinearPatternTransform { data: pattern_info },
|
||||
} = &resp
|
||||
else {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!("EntityLinearPattern response was not as expected: {:?}", resp),
|
||||
source_ranges: vec![args.source_range],
|
||||
}));
|
||||
};
|
||||
|
||||
let mut geometries = vec![Box::new(extrude_group.clone())];
|
||||
for id in pattern_info.entity_ids.iter() {
|
||||
let mut new_extrude_group = extrude_group.clone();
|
||||
new_extrude_group.id = *id;
|
||||
geometries.push(Box::new(new_extrude_group));
|
||||
}
|
||||
Ok(geometries)
|
||||
}
|
||||
|
||||
async fn make_transform<'a>(
|
||||
i: u32,
|
||||
transform_function: &FunctionParam<'a>,
|
||||
source_range: SourceRange,
|
||||
) -> Result<kittycad::types::LinearTransform, KclError> {
|
||||
// Call the transform fn for this repetition.
|
||||
let repetition_num = MemoryItem::UserVal(UserVal {
|
||||
value: serde_json::Value::Number(i.into()),
|
||||
meta: vec![source_range.into()],
|
||||
});
|
||||
let transform_fn_args = vec![repetition_num];
|
||||
let transform_fn_return = transform_function.call(transform_fn_args).await?.0;
|
||||
|
||||
// Unpack the returned transform object.
|
||||
let source_ranges = vec![source_range];
|
||||
let transform_fn_return = transform_fn_return.ok_or_else(|| {
|
||||
KclError::Semantic(KclErrorDetails {
|
||||
message: "Transform function must return a value".to_string(),
|
||||
source_ranges: source_ranges.clone(),
|
||||
})
|
||||
})?;
|
||||
let ProgramReturn::Value(transform_fn_return) = transform_fn_return else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: "Transform function must return a value".to_string(),
|
||||
source_ranges: source_ranges.clone(),
|
||||
}));
|
||||
};
|
||||
let MemoryItem::UserVal(transform) = transform_fn_return else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: "Transform function must return a transform object".to_string(),
|
||||
source_ranges: source_ranges.clone(),
|
||||
}));
|
||||
};
|
||||
|
||||
// Apply defaults to the transform.
|
||||
let replicate = match transform.value.get("replicate") {
|
||||
Some(serde_json::Value::Bool(true)) => true,
|
||||
Some(serde_json::Value::Bool(false)) => false,
|
||||
Some(_) => {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: "The 'replicate' key must be a bool".to_string(),
|
||||
source_ranges: source_ranges.clone(),
|
||||
}));
|
||||
}
|
||||
None => true,
|
||||
};
|
||||
let scale = match transform.value.get("scale") {
|
||||
Some(x) => array_to_point3d(x, source_ranges.clone())?,
|
||||
None => Point3d { x: 1.0, y: 1.0, z: 1.0 },
|
||||
};
|
||||
let translate = match transform.value.get("translate") {
|
||||
Some(x) => array_to_point3d(x, source_ranges.clone())?,
|
||||
None => Point3d { x: 0.0, y: 0.0, z: 0.0 },
|
||||
};
|
||||
let t = kittycad::types::LinearTransform {
|
||||
replicate,
|
||||
scale: Some(scale.into()),
|
||||
translate: Some(translate.into()),
|
||||
};
|
||||
Ok(t)
|
||||
}
|
||||
|
||||
fn array_to_point3d(json: &serde_json::Value, source_ranges: Vec<SourceRange>) -> Result<Point3d, KclError> {
|
||||
let serde_json::Value::Array(arr) = dbg!(json) else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: "Expected an array of 3 numbers (i.e. a 3D point)".to_string(),
|
||||
source_ranges,
|
||||
}));
|
||||
};
|
||||
let len = arr.len();
|
||||
if len != 3 {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
message: format!("Expected an array of 3 numbers (i.e. a 3D point) but found {len} items"),
|
||||
source_ranges,
|
||||
}));
|
||||
};
|
||||
// Gets an f64 from a JSON value, returns Option.
|
||||
let f = |j: &serde_json::Value| j.as_number().and_then(|num| num.as_f64()).map(|x| x.to_owned());
|
||||
let err = |component| {
|
||||
KclError::Semantic(KclErrorDetails {
|
||||
message: format!("{component} component of this point was not a number"),
|
||||
source_ranges: source_ranges.clone(),
|
||||
})
|
||||
};
|
||||
let x = f(&arr[0]).ok_or_else(|| err("X"))?;
|
||||
let y = f(&arr[1]).ok_or_else(|| err("Y"))?;
|
||||
let z = f(&arr[2]).ok_or_else(|| err("Z"))?;
|
||||
Ok(Point3d { x, y, z })
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_array_to_point3d() {
|
||||
let input = serde_json::json! {
|
||||
[1.1, 2.2, 3.3]
|
||||
};
|
||||
let expected = Point3d { x: 1.1, y: 2.2, z: 3.3 };
|
||||
let actual = array_to_point3d(&input, Vec::new());
|
||||
assert_eq!(actual.unwrap(), expected);
|
||||
}
|
||||
}
|
||||
|
||||
/// A linear pattern on a 2D sketch.
|
||||
pub async fn pattern_linear_2d(args: Args) -> Result<MemoryItem, KclError> {
|
||||
let (data, sketch_group_set): (LinearPattern2dData, SketchGroupSet) = args.get_data_and_sketch_group_set()?;
|
||||
|
@ -431,7 +431,7 @@ mod tests {
|
||||
);
|
||||
|
||||
if let Err(err) = result {
|
||||
assert!(err.to_string().contains( "Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
|
||||
assert!(err.to_string().contains("Point Point2d { x: 0.0, y: 5.0 } is not on the circumference of the circle with center Point2d { x: 10.0, y: -10.0 } and radius 10."));
|
||||
} else {
|
||||
panic!("Expected error");
|
||||
}
|
||||
|
296
src/wasm-lib/kcl/src/walk/ast_walk.rs
Normal file
296
src/wasm-lib/kcl/src/walk/ast_walk.rs
Normal file
@ -0,0 +1,296 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{
|
||||
ast::types::{
|
||||
BinaryPart, BodyItem, LiteralIdentifier, MemberExpression, MemberObject, ObjectExpression, ObjectProperty,
|
||||
Parameter, Program, UnaryExpression, Value, VariableDeclarator,
|
||||
},
|
||||
walk::Node,
|
||||
};
|
||||
|
||||
/// Walker is implemented by things that are able to walk an AST tree to
|
||||
/// produce lints. This trait is implemented automatically for a few of the
|
||||
/// common types, but can be manually implemented too.
|
||||
pub trait Walker<'a> {
|
||||
/// Walk will visit every element of the AST.
|
||||
fn walk(&self, n: Node<'a>) -> Result<bool>;
|
||||
}
|
||||
|
||||
impl<'a, FnT> Walker<'a> for FnT
|
||||
where
|
||||
FnT: Fn(Node<'a>) -> Result<bool>,
|
||||
{
|
||||
fn walk(&self, n: Node<'a>) -> Result<bool> {
|
||||
self(n)
|
||||
}
|
||||
}
|
||||
|
||||
/// Run the Walker against all [Node]s in a [Program].
|
||||
pub fn walk<'a, WalkT>(prog: &'a Program, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(prog.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
for bi in &prog.body {
|
||||
if !walk_body_item(bi, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
fn walk_variable_declarator<'a, WalkT>(node: &'a VariableDeclarator, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(node.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
if !f.walk((&node.id).into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
walk_value(&node.init, f)
|
||||
}
|
||||
|
||||
fn walk_parameter<'a, WalkT>(node: &'a Parameter, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(node.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
f.walk((&node.identifier).into())
|
||||
}
|
||||
|
||||
fn walk_member_object<'a, WalkT>(node: &'a MemberObject, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())
|
||||
}
|
||||
|
||||
fn walk_literal_identifier<'a, WalkT>(node: &'a LiteralIdentifier, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
f.walk(node.into())
|
||||
}
|
||||
|
||||
fn walk_member_expression<'a, WalkT>(node: &'a MemberExpression, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(node.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
if !walk_member_object(&node.object, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
walk_literal_identifier(&node.property, f)
|
||||
}
|
||||
|
||||
fn walk_binary_part<'a, WalkT>(node: &'a BinaryPart, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
match node {
|
||||
BinaryPart::Literal(lit) => f.walk(lit.as_ref().into()),
|
||||
BinaryPart::Identifier(id) => f.walk(id.as_ref().into()),
|
||||
BinaryPart::BinaryExpression(be) => f.walk(be.as_ref().into()),
|
||||
BinaryPart::CallExpression(ce) => f.walk(ce.as_ref().into()),
|
||||
BinaryPart::UnaryExpression(ue) => walk_unary_expression(ue, f),
|
||||
BinaryPart::MemberExpression(me) => walk_member_expression(me, f),
|
||||
}
|
||||
}
|
||||
|
||||
fn walk_value<'a, WalkT>(node: &'a Value, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
match node {
|
||||
Value::Literal(lit) => f.walk(lit.as_ref().into()),
|
||||
Value::TagDeclarator(tag) => f.walk(tag.as_ref().into()),
|
||||
|
||||
Value::Identifier(id) => {
|
||||
// sometimes there's a bare Identifier without a Value::Identifier.
|
||||
f.walk(id.as_ref().into())
|
||||
}
|
||||
|
||||
Value::BinaryExpression(be) => {
|
||||
if !f.walk(be.as_ref().into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
if !walk_binary_part(&be.left, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
walk_binary_part(&be.right, f)
|
||||
}
|
||||
Value::FunctionExpression(fe) => {
|
||||
if !f.walk(fe.as_ref().into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
for arg in &fe.params {
|
||||
if !walk_parameter(arg, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
walk(&fe.body, f)
|
||||
}
|
||||
Value::CallExpression(ce) => {
|
||||
if !f.walk(ce.as_ref().into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
if !f.walk((&ce.callee).into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
for e in &ce.arguments {
|
||||
if !walk_value::<WalkT>(e, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Value::PipeExpression(pe) => {
|
||||
if !f.walk(pe.as_ref().into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
for e in &pe.body {
|
||||
if !walk_value::<WalkT>(e, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Value::PipeSubstitution(ps) => f.walk(ps.as_ref().into()),
|
||||
Value::ArrayExpression(ae) => {
|
||||
if !f.walk(ae.as_ref().into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
for e in &ae.elements {
|
||||
if !walk_value::<WalkT>(e, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Value::ObjectExpression(oe) => walk_object_expression(oe, f),
|
||||
Value::MemberExpression(me) => walk_member_expression(me, f),
|
||||
Value::UnaryExpression(ue) => walk_unary_expression(ue, f),
|
||||
Value::None(_) => Ok(true),
|
||||
}
|
||||
}
|
||||
|
||||
/// Walk through an [ObjectProperty].
|
||||
fn walk_object_property<'a, WalkT>(node: &'a ObjectProperty, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(node.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
walk_value(&node.value, f)
|
||||
}
|
||||
|
||||
/// Walk through an [ObjectExpression].
|
||||
fn walk_object_expression<'a, WalkT>(node: &'a ObjectExpression, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(node.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
for prop in &node.properties {
|
||||
if !walk_object_property(prop, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// walk through an [UnaryExpression].
|
||||
fn walk_unary_expression<'a, WalkT>(node: &'a UnaryExpression, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
if !f.walk(node.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
walk_binary_part(&node.argument, f)
|
||||
}
|
||||
|
||||
/// walk through a [BodyItem].
|
||||
fn walk_body_item<'a, WalkT>(node: &'a BodyItem, f: &WalkT) -> Result<bool>
|
||||
where
|
||||
WalkT: Walker<'a>,
|
||||
{
|
||||
// We don't walk a BodyItem since it's an enum itself.
|
||||
|
||||
match node {
|
||||
BodyItem::ExpressionStatement(xs) => {
|
||||
if !f.walk(xs.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
walk_value(&xs.expression, f)
|
||||
}
|
||||
BodyItem::VariableDeclaration(vd) => {
|
||||
if !f.walk(vd.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
for dec in &vd.declarations {
|
||||
if !walk_variable_declarator(dec, f)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
BodyItem::ReturnStatement(rs) => {
|
||||
if !f.walk(rs.into())? {
|
||||
return Ok(false);
|
||||
}
|
||||
walk_value(&rs.argument, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
macro_rules! kcl {
|
||||
( $kcl:expr ) => {{
|
||||
let tokens = $crate::token::lexer($kcl).unwrap();
|
||||
let parser = $crate::parser::Parser::new(tokens);
|
||||
parser.ast().unwrap()
|
||||
}};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stop_walking() {
|
||||
let program = kcl!(
|
||||
"
|
||||
const foo = 1
|
||||
const bar = 2
|
||||
"
|
||||
);
|
||||
|
||||
walk(&program, &|node| {
|
||||
if let Node::VariableDeclarator(vd) = node {
|
||||
if vd.id.name == "foo" {
|
||||
return Ok(false);
|
||||
}
|
||||
panic!("walk didn't stop");
|
||||
}
|
||||
Ok(true)
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user