Compare commits
39 Commits
kcl-0.2.22...mike/engin

| SHA1 |
|---|
| 5bea90ad9a |
| d23ddc19eb |
| 4bd7e02271 |
| 26042790b6 |
| af74f3bb05 |
| 0bdedf5854 |
| d2c6b5cf3a |
| c42967d0e7 |
| cb8fc33adb |
| 2dc8b429ff |
| 19ffa220e8 |
| 5332ddd88e |
| 11d9a2ee00 |
| bfebc41a5c |
| 824b4c823e |
| 785002fa4e |
| f650281855 |
| 9f6999829a |
| a14bbaa237 |
| 0706624381 |
| ef0ae5e06e |
| a010743abb |
| 057ee479c3 |
| 7218efc489 |
| b6dd6e7dd0 |
| 47af18f533 |
| 0505220dac |
| f7711b71d6 |
| e65358f635 |
| 0a1201e680 |
| 9db013e672 |
| 0196d72a2d |
| e6af4078bd |
| 2b233dc705 |
| b11e8af9c7 |
| c017847d7b |
| 9635eea8c1 |
| 5a2df642b1 |
| 621e41080e |

BIN  ..env.development.local.swp  (Normal file)

@@ -1,3 +1,3 @@
[codespell]
ignore-words-list: crate,everytime,inout,co-ordinate,ot,nwo,absolutey,atleast,ue,afterall
skip: **/target,node_modules,build,**/Cargo.lock,./docs/kcl/*.md,.yarn.lock,**/yarn.lock
skip: **/target,node_modules,build,**/Cargo.lock,./docs/kcl/*.md,.yarn.lock,**/yarn.lock,./openapi/*.json,./src/lib/machine-api.d.ts

@@ -1,3 +1,4 @@
src/wasm-lib/*
src/lib/engine-utils/engine.js
*.typegen.ts
packages/codemirror-lsp-client/dist/*

95  .github/workflows/build-test-publish-apps.yml  vendored
@@ -72,6 +72,18 @@ jobs:
- id: export_version
run: echo "version=`cat package.json | jq -r '.version'`" >> "$GITHUB_OUTPUT"

- name: Prepare electron-builder.yml file for nightly
if: ${{ github.event_name == 'schedule' }}
run: |
yq -i '.publish[0].url = "https://dl.zoo.dev/releases/modeling-app/nightly"' electron-builder.yml

- uses: actions/upload-artifact@v3
if: ${{ github.event_name == 'schedule' }}
with:
name: prepared-files-nightly
path: |
electron-builder.yml

- id: export_notes
run: echo "notes=`cat release-notes.md'`" >> "$GITHUB_OUTPUT"

@@ -81,6 +93,7 @@ jobs:
yq -i '.publish[0].url = "https://dl.zoo.dev/releases/modeling-app/updater-test-release-notes"' electron-builder.yml

- uses: actions/upload-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
with:
name: prepared-files-updater-test
path: |
@@ -92,7 +105,13 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [macos-14, windows-2022, ubuntu-22.04]
include:
- os: macos-14
platform: mac
- os: windows-2022
platform: win
- os: ubuntu-22.04
platform: linux
runs-on: ${{ matrix.os }}
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
@@ -121,6 +140,16 @@ jobs:
cp prepared-files/src/wasm-lib/pkg/wasm_lib* src/wasm-lib/pkg
cp prepared-files/release-notes.md release-notes.md

- uses: actions/download-artifact@v3
if: ${{ github.event_name == 'schedule' }}
name: prepared-files-nightly

- name: Copy updated electron-builder.yml file for nightly build
if: ${{ github.event_name == 'schedule' }}
run: |
ls -R prepared-files-nightly
cp prepared-files-nightly/electron-builder.yml electron-builder.yml

- name: Sync node version and setup cache
uses: actions/setup-node@v4
with:
@@ -165,9 +194,27 @@ jobs:

- uses: actions/upload-artifact@v3
with:
name: out-${{ matrix.os }}
name: out-arm64-${{ matrix.platform }}
# first two will pick both Zoo Modeling App-$VERSION-arm64-win.exe and Zoo Modeling App-$VERSION-win.exe
path: |
out/*-${{ env.VERSION_NO_V }}-win.*
out/*-${{ env.VERSION_NO_V }}-arm64-win.*
out/*-arm64-mac.*
out/*-arm64-linux.*

- uses: actions/upload-artifact@v3
with:
name: out-x64-${{ matrix.platform }}
path: |
out/*-x64-win.*
out/*-x64-mac.*
out/*-x86_64-linux.*

- uses: actions/upload-artifact@v3
if: ${{ env.BUILD_RELEASE == 'true' }}
with:
name: out-yml
path: |
out/Zoo*.*
out/latest*.yml

# TODO: add the 'Build for Mac TestFlight (nightly)' stage back
@@ -189,10 +236,20 @@ jobs:
- uses: actions/upload-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
with:
name: updater-test-${{ matrix.os }}
name: updater-test-arm64-${{ matrix.platform }}
path: |
out/Zoo*.*
out/latest*.yml
out/*-arm64-win.exe
out/*-arm64-mac.dmg
out/*-arm64-linux.AppImage

- uses: actions/upload-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
with:
name: updater-test-x64-${{ matrix.platform }}
path: |
out/*-x64-win.exe
out/*-x64-mac.dmg
out/*-x86_64-linux.AppImage


publish-apps-release:
@@ -214,17 +271,37 @@ jobs:

- uses: actions/download-artifact@v3
with:
name: out-windows-2022
name: out-arm64-win
path: out

- uses: actions/download-artifact@v3
with:
name: out-macos-14
name: out-x64-win
path: out

- uses: actions/download-artifact@v3
with:
name: out-ubuntu-22.04
name: out-arm64-mac
path: out

- uses: actions/download-artifact@v3
with:
name: out-x64-mac
path: out

- uses: actions/download-artifact@v3
with:
name: out-arm64-linux
path: out

- uses: actions/download-artifact@v3
with:
name: out-x64-linux
path: out

- uses: actions/download-artifact@v3
with:
name: out-yml
path: out

- name: Generate the download static endpoint

4  .gitignore  vendored
@@ -66,3 +66,7 @@ venv

# electron
out/

# engine wasm utils
src/lib/engine-utils/engine.wasm
src/lib/engine-utils/engine.js

22  README.md
@@ -158,11 +158,29 @@ The PR may then serve as a place to discuss the human-readable changelog and ext

#### 3. Manually test artifacts from the Cut Release PR

The release builds can be find under the `artifact` zip, at the very bottom of the `ci` action page for each commit on this branch.
##### Release builds

The release builds can be found under the `out-{platform}` zip, at the very bottom of the `build-publish-apps` summary page for each commit on this branch.

Manually test against this [list](https://github.com/KittyCAD/modeling-app/issues/3588) across Windows, MacOS, Linux and posting results as comments in the Cut Release PR.

The other `ci` output in Cut Release PRs is `updater-test`, because we don't have a way to test this fully automated, we have a semi-automated process. Download updater-test zip file, install the app, run it, expect an updater prompt to a dummy v0.99.99, install it and check that the app comes back at that version (on both macOS and Windows).
##### Updater-test builds

The other `build-publish-apps` output in Cut Release PRs is `updater-test-{platform}`. As we don't have a way to test this fully automatically, we have a semi-automated process. For macOS, Windows, and Linux, download the corresponding updater-test artifact file, install the app, run it, expect an updater prompt to a dummy v0.255.255, install it and check that the app comes back at that version.

The only difference with these builds is that they point to a different update location on the release bucket, with this dummy v0.255.255 always available. This helps ensuring that the version we release will be able to update to the next one available.

If the prompt doesn't show up, start the app in command line to grab the electron-updater logs. This is likely an issue with the current build that needs addressing (or the updater-test location in the storage bucket).
```
# Windows (PowerShell)
& 'C:\Program Files\Zoo Modeling App\Zoo Modeling App.exe'

# macOS
/Applications/Zoo\ Modeling\ App.app/Contents/MacOS/Zoo\ Modeling\ App

# Linux
./Zoo Modeling App-{version}-{arch}-linux.AppImage
```

#### 4. Merge the Cut Release PR

@ -82652,7 +82652,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -83209,7 +83208,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
@ -86410,7 +86408,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -86967,7 +86964,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
@ -90172,7 +90168,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -90729,7 +90724,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
@ -114786,7 +114780,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -115343,7 +115336,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
@ -118937,7 +118929,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -119494,7 +119485,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
@ -122695,7 +122685,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -123252,7 +123241,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
@ -126451,7 +126439,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start"
|
||||
],
|
||||
"properties": {
|
||||
@ -127008,7 +126995,6 @@
|
||||
"required": [
|
||||
"body",
|
||||
"end",
|
||||
"nonCodeMeta",
|
||||
"start",
|
||||
"type"
|
||||
],
|
||||
|
@ -313,3 +313,45 @@ test(
|
||||
await electronApp.close()
|
||||
}
|
||||
)
|
||||
|
||||
test(
|
||||
'external change of file contents are reflected in editor',
|
||||
{ tag: '@electron' },
|
||||
async ({ browserName }, testInfo) => {
|
||||
const PROJECT_DIR_NAME = 'lee-was-here'
|
||||
const {
|
||||
electronApp,
|
||||
page,
|
||||
dir: projectsDir,
|
||||
} = await setupElectron({
|
||||
testInfo,
|
||||
folderSetupFn: async (dir) => {
|
||||
const aProjectDir = join(dir, PROJECT_DIR_NAME)
|
||||
await fsp.mkdir(aProjectDir, { recursive: true })
|
||||
},
|
||||
})
|
||||
|
||||
const u = await getUtils(page)
|
||||
await page.setViewportSize({ width: 1200, height: 500 })
|
||||
|
||||
await test.step('Open the project', async () => {
|
||||
await expect(page.getByText(PROJECT_DIR_NAME)).toBeVisible()
|
||||
await page.getByText(PROJECT_DIR_NAME).click()
|
||||
await u.waitForPageLoad()
|
||||
})
|
||||
|
||||
await u.openFilePanel()
|
||||
await u.openKclCodePanel()
|
||||
|
||||
await test.step('Write to file externally and check for changed content', async () => {
|
||||
const content = 'ha he ho ho ha blap scap be dap'
|
||||
await fsp.writeFile(
|
||||
join(projectsDir, PROJECT_DIR_NAME, 'main.kcl'),
|
||||
content
|
||||
)
|
||||
await u.editorTextMatches(content)
|
||||
})
|
||||
|
||||
await electronApp.close()
|
||||
}
|
||||
)
|
||||
|
@ -104,7 +104,7 @@ test(
|
||||
},
|
||||
{ timeout: 15_000 }
|
||||
)
|
||||
.toBe(431341)
|
||||
.toBeGreaterThan(300_000)
|
||||
|
||||
// clean up output.gltf
|
||||
await fsp.rm('output.gltf')
|
||||
@ -179,7 +179,7 @@ test(
|
||||
},
|
||||
{ timeout: 15_000 }
|
||||
)
|
||||
.toBe(102040)
|
||||
.toBeGreaterThan(100_000)
|
||||
|
||||
// clean up output.gltf
|
||||
await fsp.rm('output.gltf')
|
||||
|
@ -1,6 +1,16 @@
|
||||
import { test, expect } from '@playwright/test'
|
||||
import fsp from 'fs/promises'
|
||||
import { uuidv4 } from 'lib/utils'
|
||||
import { getUtils, setup, tearDown } from './test-utils'
|
||||
import {
|
||||
darkModeBgColor,
|
||||
darkModePlaneColorXZ,
|
||||
executorInputPath,
|
||||
getUtils,
|
||||
setup,
|
||||
setupElectron,
|
||||
tearDown,
|
||||
} from './test-utils'
|
||||
import { join } from 'path'
|
||||
|
||||
test.beforeEach(async ({ context, page }, testInfo) => {
|
||||
await setup(context, page, testInfo)
|
||||
@ -974,4 +984,84 @@ test.describe('Editor tests', () => {
|
||||
|> close(%)
|
||||
|> extrude(5, %)`)
|
||||
})
|
||||
|
||||
test(
|
||||
`Can use the import stdlib function on a local OBJ file`,
|
||||
{ tag: '@electron' },
|
||||
async ({ browserName }, testInfo) => {
|
||||
const { electronApp, page } = await setupElectron({
|
||||
testInfo,
|
||||
folderSetupFn: async (dir) => {
|
||||
const bracketDir = join(dir, 'cube')
|
||||
await fsp.mkdir(bracketDir, { recursive: true })
|
||||
await fsp.copyFile(
|
||||
executorInputPath('cube.obj'),
|
||||
join(bracketDir, 'cube.obj')
|
||||
)
|
||||
await fsp.writeFile(join(bracketDir, 'main.kcl'), '')
|
||||
},
|
||||
})
|
||||
const viewportSize = { width: 1200, height: 500 }
|
||||
await page.setViewportSize(viewportSize)
|
||||
|
||||
// Locators and constants
|
||||
const u = await getUtils(page)
|
||||
const projectLink = page.getByRole('link', { name: 'cube' })
|
||||
const gizmo = page.locator('[aria-label*=gizmo]')
|
||||
const resetCameraButton = page.getByRole('button', { name: 'Reset view' })
|
||||
const locationToHavColor = async (
|
||||
position: { x: number; y: number },
|
||||
color: [number, number, number]
|
||||
) => {
|
||||
return u.getGreatestPixDiff(position, color)
|
||||
}
|
||||
const notTheOrigin = {
|
||||
x: viewportSize.width * 0.55,
|
||||
y: viewportSize.height * 0.3,
|
||||
}
|
||||
const origin = { x: viewportSize.width / 2, y: viewportSize.height / 2 }
|
||||
const errorIndicators = page.locator('.cm-lint-marker-error')
|
||||
|
||||
await test.step(`Open the empty file, see the default planes`, async () => {
|
||||
await projectLink.click()
|
||||
await u.waitForPageLoad()
|
||||
await expect
|
||||
.poll(
|
||||
async () => locationToHavColor(notTheOrigin, darkModePlaneColorXZ),
|
||||
{
|
||||
timeout: 5000,
|
||||
message: 'XZ plane color is visible',
|
||||
}
|
||||
)
|
||||
.toBeLessThan(15)
|
||||
})
|
||||
await test.step(`Write the import function line`, async () => {
|
||||
await u.codeLocator.fill(`import('cube.obj')`)
|
||||
await page.waitForTimeout(800)
|
||||
})
|
||||
await test.step(`Reset the camera before checking`, async () => {
|
||||
await u.doAndWaitForCmd(async () => {
|
||||
await gizmo.click({ button: 'right' })
|
||||
await resetCameraButton.click()
|
||||
}, 'zoom_to_fit')
|
||||
})
|
||||
await test.step(`Verify that we see the imported geometry and no errors`, async () => {
|
||||
await expect(errorIndicators).toHaveCount(0)
|
||||
await expect
|
||||
.poll(async () => locationToHavColor(origin, darkModePlaneColorXZ), {
|
||||
timeout: 3000,
|
||||
message: 'Plane color should not be visible',
|
||||
})
|
||||
.toBeGreaterThan(15)
|
||||
await expect
|
||||
.poll(async () => locationToHavColor(origin, darkModeBgColor), {
|
||||
timeout: 3000,
|
||||
message: 'Background color should not be visible',
|
||||
})
|
||||
.toBeGreaterThan(15)
|
||||
})
|
||||
|
||||
await electronApp.close()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -136,6 +136,9 @@ test.describe('when using the file tree to', () => {
|
||||
)
|
||||
await pasteCodeInEditor(kclCube)
|
||||
|
||||
// TODO: We have a timeout of 1s between edits to write to disk. If you reload the page too quickly it won't write to disk.
|
||||
await tronApp.page.waitForTimeout(2000)
|
||||
|
||||
await renameFile(fromFile, toFile)
|
||||
await tronApp.page.reload()
|
||||
|
||||
@ -222,9 +225,11 @@ test.describe('when using the file tree to', () => {
|
||||
)
|
||||
await pasteCodeInEditor(kclCube)
|
||||
|
||||
// TODO: We have a timeout of 1s between edits to write to disk. If you reload the page too quickly it won't write to disk.
|
||||
await tronApp.page.waitForTimeout(2000)
|
||||
|
||||
const kcl1 = 'main.kcl'
|
||||
const kcl2 = '2.kcl'
|
||||
|
||||
await createNewFileAndSelect(kcl2)
|
||||
const kclCylinder = await fsp.readFile(
|
||||
'src/wasm-lib/tests/executor/inputs/cylinder.kcl',
|
||||
@ -232,6 +237,9 @@ test.describe('when using the file tree to', () => {
|
||||
)
|
||||
await pasteCodeInEditor(kclCylinder)
|
||||
|
||||
// TODO: We have a timeout of 1s between edits to write to disk. If you reload the page too quickly it won't write to disk.
|
||||
await tronApp.page.waitForTimeout(2000)
|
||||
|
||||
await renameFile(kcl2, kcl1)
|
||||
|
||||
await test.step(`Postcondition: ${kcl1} still has the original content`, async () => {
|
||||
@ -960,4 +968,171 @@ _test.describe('Deleting items from the file pane', () => {
|
||||
'TODO - delete folder we are in, with no main.kcl',
|
||||
async () => {}
|
||||
)
|
||||
|
||||
// Copied from tests above.
|
||||
_test(
|
||||
`external deletion of project navigates back home`,
|
||||
{ tag: '@electron' },
|
||||
async ({ browserName }, testInfo) => {
|
||||
const TEST_PROJECT_NAME = 'Test Project'
|
||||
const {
|
||||
electronApp,
|
||||
page,
|
||||
dir: projectsDirName,
|
||||
} = await setupElectron({
|
||||
testInfo,
|
||||
folderSetupFn: async (dir) => {
|
||||
await fsp.mkdir(join(dir, TEST_PROJECT_NAME), { recursive: true })
|
||||
await fsp.mkdir(join(dir, TEST_PROJECT_NAME, 'folderToDelete'), {
|
||||
recursive: true,
|
||||
})
|
||||
await fsp.copyFile(
|
||||
executorInputPath('basic_fillet_cube_end.kcl'),
|
||||
join(dir, TEST_PROJECT_NAME, 'main.kcl')
|
||||
)
|
||||
await fsp.copyFile(
|
||||
executorInputPath('cylinder.kcl'),
|
||||
join(dir, TEST_PROJECT_NAME, 'folderToDelete', 'someFileWithin.kcl')
|
||||
)
|
||||
},
|
||||
})
|
||||
const u = await getUtils(page)
|
||||
await page.setViewportSize({ width: 1200, height: 500 })
|
||||
|
||||
// Constants and locators
|
||||
const projectCard = page.getByText(TEST_PROJECT_NAME)
|
||||
const projectMenuButton = page.getByTestId('project-sidebar-toggle')
|
||||
const folderToDelete = page.getByRole('button', {
|
||||
name: 'folderToDelete',
|
||||
})
|
||||
const fileWithinFolder = page.getByRole('listitem').filter({
|
||||
has: page.getByRole('button', { name: 'someFileWithin.kcl' }),
|
||||
})
|
||||
|
||||
await _test.step(
|
||||
'Open project and navigate into folderToDelete',
|
||||
async () => {
|
||||
await projectCard.click()
|
||||
await u.waitForPageLoad()
|
||||
await _expect(projectMenuButton).toContainText('main.kcl')
|
||||
await u.closeKclCodePanel()
|
||||
await u.openFilePanel()
|
||||
|
||||
await folderToDelete.click()
|
||||
await _expect(fileWithinFolder).toBeVisible()
|
||||
await fileWithinFolder.click()
|
||||
await _expect(projectMenuButton).toContainText('someFileWithin.kcl')
|
||||
}
|
||||
)
|
||||
|
||||
// Point of divergence. Delete the project folder and see if it goes back
|
||||
// to the home view.
|
||||
await _test.step(
|
||||
'Delete projectsDirName/<project-name> externally',
|
||||
async () => {
|
||||
await fsp.rm(join(projectsDirName, TEST_PROJECT_NAME), {
|
||||
recursive: true,
|
||||
force: true,
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
await _test.step('Check the app is back on the home view', async () => {
|
||||
const projectsDirLink = page.getByText('Loaded from')
|
||||
await _expect(projectsDirLink).toBeVisible()
|
||||
})
|
||||
|
||||
await electronApp.close()
|
||||
}
|
||||
)
|
||||
|
||||
// Similar to the above
|
||||
_test(
|
||||
`external deletion of file in sub-directory updates the file tree and recreates it on code editor typing`,
|
||||
{ tag: '@electron' },
|
||||
async ({ browserName }, testInfo) => {
|
||||
const TEST_PROJECT_NAME = 'Test Project'
|
||||
const {
|
||||
electronApp,
|
||||
page,
|
||||
dir: projectsDirName,
|
||||
} = await setupElectron({
|
||||
testInfo,
|
||||
folderSetupFn: async (dir) => {
|
||||
await fsp.mkdir(join(dir, TEST_PROJECT_NAME), { recursive: true })
|
||||
await fsp.mkdir(join(dir, TEST_PROJECT_NAME, 'folderToDelete'), {
|
||||
recursive: true,
|
||||
})
|
||||
await fsp.copyFile(
|
||||
executorInputPath('basic_fillet_cube_end.kcl'),
|
||||
join(dir, TEST_PROJECT_NAME, 'main.kcl')
|
||||
)
|
||||
await fsp.copyFile(
|
||||
executorInputPath('cylinder.kcl'),
|
||||
join(dir, TEST_PROJECT_NAME, 'folderToDelete', 'someFileWithin.kcl')
|
||||
)
|
||||
},
|
||||
})
|
||||
const u = await getUtils(page)
|
||||
await page.setViewportSize({ width: 1200, height: 500 })
|
||||
|
||||
// Constants and locators
|
||||
const projectCard = page.getByText(TEST_PROJECT_NAME)
|
||||
const projectMenuButton = page.getByTestId('project-sidebar-toggle')
|
||||
const folderToDelete = page.getByRole('button', {
|
||||
name: 'folderToDelete',
|
||||
})
|
||||
const fileWithinFolder = page.getByRole('listitem').filter({
|
||||
has: page.getByRole('button', { name: 'someFileWithin.kcl' }),
|
||||
})
|
||||
|
||||
await _test.step(
|
||||
'Open project and navigate into folderToDelete',
|
||||
async () => {
|
||||
await projectCard.click()
|
||||
await u.waitForPageLoad()
|
||||
await _expect(projectMenuButton).toContainText('main.kcl')
|
||||
|
||||
await u.openFilePanel()
|
||||
|
||||
await folderToDelete.click()
|
||||
await _expect(fileWithinFolder).toBeVisible()
|
||||
await fileWithinFolder.click()
|
||||
await _expect(projectMenuButton).toContainText('someFileWithin.kcl')
|
||||
}
|
||||
)
|
||||
|
||||
await _test.step(
|
||||
'Delete projectsDirName/<project-name> externally',
|
||||
async () => {
|
||||
await fsp.rm(
|
||||
join(
|
||||
projectsDirName,
|
||||
TEST_PROJECT_NAME,
|
||||
'folderToDelete',
|
||||
'someFileWithin.kcl'
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
await _test.step('Check the file is gone in the file tree', async () => {
|
||||
await _expect(
|
||||
page.getByTestId('file-pane-scroll-container')
|
||||
).not.toContainText('someFileWithin.kcl')
|
||||
})
|
||||
|
||||
await _test.step(
|
||||
'Check the file is back in the file tree after typing in code editor',
|
||||
async () => {
|
||||
await u.pasteCodeInEditor('hello = 1')
|
||||
await _expect(
|
||||
page.getByTestId('file-pane-scroll-container')
|
||||
).toContainText('someFileWithin.kcl')
|
||||
}
|
||||
)
|
||||
|
||||
await electronApp.close()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -255,7 +255,7 @@ test.describe('Can export from electron app', () => {
|
||||
},
|
||||
{ timeout: 15_000 }
|
||||
)
|
||||
.toBe(431341)
|
||||
.toBeGreaterThan(300_000)
|
||||
|
||||
// clean up output.gltf
|
||||
await fsp.rm('output.gltf')
|
||||
@ -851,7 +851,7 @@ test(
|
||||
}
|
||||
)
|
||||
|
||||
test(
|
||||
test.fixme(
|
||||
'When the project folder is empty, user can create new project and open it.',
|
||||
{ tag: '@electron' },
|
||||
async ({ browserName }, testInfo) => {
|
||||
@ -861,6 +861,12 @@ test(
|
||||
|
||||
page.on('console', console.log)
|
||||
|
||||
// Locators and constants
|
||||
const gizmo = page.locator('[aria-label*=gizmo]')
|
||||
const resetCameraButton = page.getByRole('button', { name: 'Reset view' })
|
||||
const pointOnModel = { x: 660, y: 250 }
|
||||
const expectedStartCamZPosition = 15633.47
|
||||
|
||||
// expect to see text "No Projects found"
|
||||
await expect(page.getByText('No Projects found')).toBeVisible()
|
||||
|
||||
@ -873,16 +879,7 @@ test(
|
||||
|
||||
await page.getByText('project-000').click()
|
||||
|
||||
await expect(page.getByTestId('loading')).toBeAttached()
|
||||
await expect(page.getByTestId('loading')).not.toBeAttached({
|
||||
timeout: 20_000,
|
||||
})
|
||||
|
||||
await expect(
|
||||
page.getByRole('button', { name: 'Start Sketch' })
|
||||
).toBeEnabled({
|
||||
timeout: 20_000,
|
||||
})
|
||||
await u.waitForPageLoad()
|
||||
|
||||
await page.locator('.cm-content').fill(`sketch001 = startSketchOn('XZ')
|
||||
|> startProfileAt([-87.4, 282.92], %)
|
||||
@ -892,8 +889,28 @@ test(
|
||||
|> lineTo([profileStartX(%), profileStartY(%)], %)
|
||||
|> close(%)
|
||||
extrude001 = extrude(200, sketch001)`)
|
||||
await page.waitForTimeout(800)
|
||||
|
||||
const pointOnModel = { x: 660, y: 250 }
|
||||
async function getCameraZValue() {
|
||||
return page
|
||||
.getByTestId('cam-z-position')
|
||||
.inputValue()
|
||||
.then((value) => parseFloat(value))
|
||||
}
|
||||
|
||||
await test.step(`Reset camera`, async () => {
|
||||
await u.openDebugPanel()
|
||||
await u.clearCommandLogs()
|
||||
await u.doAndWaitForCmd(async () => {
|
||||
await gizmo.click({ button: 'right' })
|
||||
await resetCameraButton.click()
|
||||
}, 'zoom_to_fit')
|
||||
await expect
|
||||
.poll(getCameraZValue, {
|
||||
message: 'Camera Z should be at expected position after reset',
|
||||
})
|
||||
.toEqual(expectedStartCamZPosition)
|
||||
})
|
||||
|
||||
// gray at this pixel means the stream has loaded in the most
|
||||
// user way we can verify it (pixel color)
|
||||
@ -901,7 +918,7 @@ extrude001 = extrude(200, sketch001)`)
|
||||
.poll(() => u.getGreatestPixDiff(pointOnModel, [143, 143, 143]), {
|
||||
timeout: 10_000,
|
||||
})
|
||||
.toBeLessThan(15)
|
||||
.toBeLessThan(30)
|
||||
|
||||
await expect(async () => {
|
||||
await page.mouse.move(0, 0, { steps: 5 })
|
||||
|
@ -471,7 +471,7 @@ test(
|
||||
|
||||
await page.mouse.move(startXPx + PUR * 30, 500 - PUR * 20, { steps: 10 })
|
||||
|
||||
await page.waitForTimeout(300)
|
||||
await page.waitForTimeout(1000)
|
||||
|
||||
await expect(page).toHaveScreenshot({
|
||||
maxDiffPixels: 100,
|
||||
@ -528,6 +528,7 @@ test(
|
||||
// Draw the rectangle
|
||||
await page.mouse.click(startXPx + PUR * 20, 500 - PUR * 30)
|
||||
await page.mouse.move(startXPx + PUR * 10, 500 - PUR * 10, { steps: 5 })
|
||||
await page.waitForTimeout(800)
|
||||
|
||||
// Ensure the draft rectangle looks the same as it usually does
|
||||
await expect(page).toHaveScreenshot({
|
||||
@ -895,7 +896,7 @@ test(
|
||||
// Wait for the second extrusion to appear
|
||||
// TODO: Find a way to truly know that the objects have finished
|
||||
// rendering, because an execution-done message is not sufficient.
|
||||
await page.waitForTimeout(1000)
|
||||
await page.waitForTimeout(2000)
|
||||
|
||||
await expect(page).toHaveScreenshot({
|
||||
maxDiffPixels: 100,
|
||||
@ -939,7 +940,7 @@ test(
|
||||
// Wait for the second extrusion to appear
|
||||
// TODO: Find a way to truly know that the objects have finished
|
||||
// rendering, because an execution-done message is not sufficient.
|
||||
await page.waitForTimeout(1000)
|
||||
await page.waitForTimeout(2000)
|
||||
|
||||
await expect(page).toHaveScreenshot({
|
||||
maxDiffPixels: 100,
|
||||
|
BIN  9 binary snapshot images changed (before/after size for each file unchanged: 43, 49, 46, 62, 37, 38, 39, 42, 36 KiB)
@@ -47,6 +47,14 @@ export const commonPoints = {
num2: 14.44,
}

/** A semi-reliable color to check the default XZ plane on
 * in dark mode in the default camera position
 */
export const darkModePlaneColorXZ: [number, number, number] = [50, 50, 99]

/** A semi-reliable color to check the default dark mode bg color against */
export const darkModeBgColor: [number, number, number] = [27, 27, 27]

export const editorSelector = '[role="textbox"][data-language="kcl"]'
type PaneId = 'variables' | 'code' | 'files' | 'logs'

@@ -463,6 +471,9 @@ export async function getUtils(page: Page, test_?: typeof test) {
return test_?.step(
`Create and select project with text "${hasText}"`,
async () => {
// Without this, we get unreliable project creation. It's probably
// due to a race between the FS being read and clicking doing something.
await page.waitForTimeout(100)
await page.getByTestId('home-new-file').click()
const projectLinksPost = page.getByTestId('project-link')
await projectLinksPost.filter({ hasText }).click()
@@ -492,6 +503,11 @@ export async function getUtils(page: Page, test_?: typeof test) {

createNewFile: async (name: string) => {
return test?.step(`Create a file named ${name}`, async () => {
// If the application is in the middle of connecting a stream
// then creating a new file won't work in the end.
await expect(
page.getByRole('button', { name: 'Start Sketch' })
).not.toBeDisabled()
await page.getByTestId('create-file-button').click()
await page.getByTestId('file-rename-field').fill(name)
await page.keyboard.press('Enter')
@@ -874,8 +890,8 @@ export async function setupElectron({
appSettings
? { settings: appSettings }
: {
...TEST_SETTINGS,
settings: {
...TEST_SETTINGS,
app: {
...TEST_SETTINGS.app,
projectDirectory: projectDirName,
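
The `darkModePlaneColorXZ` and `darkModeBgColor` constants added above are meant to be compared against sampled stream pixels with `getGreatestPixDiff`, as the editor and import tests in this diff do. A minimal sketch of that kind of check, assuming a Playwright test where `getUtils` is available; the pixel position, the threshold of 15, and the timeout are illustrative:

```ts
import { expect, Page } from '@playwright/test'
import { getUtils, darkModeBgColor } from './test-utils'

// Sketch only: poll one pixel until it is close enough to the expected
// dark-mode background color. getGreatestPixDiff compares the sampled
// pixel against the expected [r, g, b] value; in the tests above, values
// below 15 are treated as a match.
async function expectDarkBgAt(page: Page, x: number, y: number) {
  const u = await getUtils(page)
  await expect
    .poll(() => u.getGreatestPixDiff({ x, y }, darkModeBgColor), {
      timeout: 5_000,
      message: 'Background color should be visible at this pixel',
    })
    .toBeLessThan(15)
}
```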
@ -9,7 +9,7 @@ import {
|
||||
executorInputPath,
|
||||
} from './test-utils'
|
||||
import { SaveSettingsPayload, SettingsLevel } from 'lib/settings/settingsTypes'
|
||||
import { SETTINGS_FILE_NAME } from 'lib/constants'
|
||||
import { SETTINGS_FILE_NAME, PROJECT_SETTINGS_FILE_NAME } from 'lib/constants'
|
||||
import {
|
||||
TEST_SETTINGS_KEY,
|
||||
TEST_SETTINGS_CORRUPTED,
|
||||
@ -445,6 +445,58 @@ test.describe('Testing settings', () => {
|
||||
}
|
||||
)
|
||||
|
||||
test(
|
||||
'project settings reload on external change',
|
||||
{ tag: '@electron' },
|
||||
async ({ browserName }, testInfo) => {
|
||||
const {
|
||||
electronApp,
|
||||
page,
|
||||
dir: projectDirName,
|
||||
} = await setupElectron({
|
||||
testInfo,
|
||||
})
|
||||
|
||||
await page.setViewportSize({ width: 1200, height: 500 })
|
||||
|
||||
const logoLink = page.getByTestId('app-logo')
|
||||
const projectDirLink = page.getByText('Loaded from')
|
||||
|
||||
await test.step('Wait for project view', async () => {
|
||||
await expect(projectDirLink).toBeVisible()
|
||||
})
|
||||
|
||||
const projectLinks = page.getByTestId('project-link')
|
||||
const oldCount = await projectLinks.count()
|
||||
await page.getByRole('button', { name: 'New project' }).click()
|
||||
await expect(projectLinks).toHaveCount(oldCount + 1)
|
||||
await projectLinks.filter({ hasText: 'project-000' }).first().click()
|
||||
|
||||
const changeColorFs = async (color: string) => {
|
||||
const tempSettingsFilePath = join(
|
||||
projectDirName,
|
||||
'project-000',
|
||||
PROJECT_SETTINGS_FILE_NAME
|
||||
)
|
||||
await fsp.writeFile(
|
||||
tempSettingsFilePath,
|
||||
`[settings.app]\nthemeColor = "${color}"`
|
||||
)
|
||||
}
|
||||
|
||||
await test.step('Check the color is first starting as we expect', async () => {
|
||||
await expect(logoLink).toHaveCSS('--primary-hue', '264.5')
|
||||
})
|
||||
|
||||
await test.step('Check color of logo changed', async () => {
|
||||
await changeColorFs('99')
|
||||
await expect(logoLink).toHaveCSS('--primary-hue', '99')
|
||||
})
|
||||
|
||||
await electronApp.close()
|
||||
}
|
||||
)
|
||||
|
||||
test(
|
||||
`Closing settings modal should go back to the original file being viewed`,
|
||||
{ tag: '@electron' },
|
||||
|

7  interface.d.ts  vendored
@@ -20,10 +20,11 @@ export interface IElectronAPI {
version: typeof process.env.version
watchFileOn: (
path: string,
key: string,
callback: (eventType: string, path: string) => void
) => void
watchFileOff: (path: string) => void
readFile: (path: string) => ReturnType<fs.readFile>
readFile: typeof fs.readFile
watchFileOff: (path: string, key: string) => void
writeFile: (
path: string,
data: string | Uint8Array
@@ -67,7 +68,7 @@ export interface IElectronAPI {
}
}
kittycad: (access: string, args: any) => any
listMachines: () => Promise<MachinesListing>
listMachines: (machineApiIp: string) => Promise<MachinesListing>
getMachineApiIp: () => Promise<string | null>
onUpdateDownloadStart: (
callback: (value: { version: string }) => void
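
The interface change above threads a `key` through the file-watcher API so several watchers can target the same path independently, and it narrows `readFile` to `typeof fs.readFile`. A hedged sketch of how a renderer might use the keyed watcher; the `window.electron` binding, the path, and the key below are assumptions for illustration, only the signatures come from the diff:

```ts
// Sketch only: a minimal slice of the updated IElectronAPI watcher surface.
// `window.electron` as the exposure point is an assumption, not something
// this diff shows.
export {}

type FileWatcherApi = {
  watchFileOn: (
    path: string,
    key: string,
    callback: (eventType: string, path: string) => void
  ) => void
  watchFileOff: (path: string, key: string) => void
}

declare global {
  interface Window {
    electron: FileWatcherApi
  }
}

const watchedPath = '/projects/demo/main.kcl' // hypothetical path
const watcherKey = 'editor-sync' // hypothetical key

// Register a named watcher; the key lets this particular watcher be removed
// later without disturbing any other watcher registered on the same path.
window.electron.watchFileOn(watchedPath, watcherKey, (eventType, path) => {
  console.log(`file event "${eventType}" on ${path}`)
})

// Remove only this watcher when it is no longer needed.
window.electron.watchFileOff(watchedPath, watcherKey)
```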

@@ -70,7 +70,7 @@ echo ""
echo "Suggested changelog:"
echo "\`\`\`"
echo "## What's Changed"
git log $(git describe --tags --abbrev=0)..HEAD --oneline --pretty=format:%s | grep -v Bump | grep -v 'Cut release v' | awk '{print "* "toupper(substr($0,0,1))substr($0,2)}'
git log $(git describe --tags --match="v[0-9]*" --abbrev=0)..HEAD --oneline --pretty=format:%s | grep -v Bump | grep -v 'Cut release v' | awk '{print "* "toupper(substr($0,0,1))substr($0,2)}'
echo ""
echo "**Full Changelog**: https://github.com/KittyCAD/modeling-app/compare/${latest_tag}...${new_version}"
echo "\`\`\`"
@ -36,38 +36,297 @@
|
||||
"description": "Extra machine-specific information regarding a connected machine.",
|
||||
"oneOf": [
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"Moonraker": {
|
||||
"type": "object"
|
||||
"type": {
|
||||
"enum": [
|
||||
"moonraker"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Moonraker"
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"Usb": {
|
||||
"type": "object"
|
||||
"type": {
|
||||
"enum": [
|
||||
"usb"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Usb"
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"Bambu": {
|
||||
"type": "object"
|
||||
"current_stage": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/Stage"
|
||||
}
|
||||
],
|
||||
"description": "The current stage of the machine as defined by Bambu which can include errors, etc.",
|
||||
"nullable": true
|
||||
},
|
||||
"nozzle_diameter": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/NozzleDiameter"
|
||||
}
|
||||
],
|
||||
"description": "The nozzle diameter of the machine."
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"bambu"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Bambu"
|
||||
"nozzle_diameter",
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"FdmHardwareConfiguration": {
|
||||
"description": "Configuration for a FDM-based printer.",
|
||||
"properties": {
|
||||
"filaments": {
|
||||
"description": "The filaments the printer has access to.",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Filament"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"nozzle_diameter": {
|
||||
"description": "Diameter of the extrusion nozzle, in mm.",
|
||||
"format": "double",
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"filaments",
|
||||
"nozzle_diameter"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"Filament": {
|
||||
"description": "Information about the filament being used in a FDM printer.",
|
||||
"properties": {
|
||||
"color": {
|
||||
"description": "The color (as hex without the `#`) of the filament, this is likely specific to the manufacturer.",
|
||||
"maxLength": 6,
|
||||
"minLength": 6,
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"material": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/FilamentMaterial"
|
||||
}
|
||||
],
|
||||
"description": "The material that the filament is made of."
|
||||
},
|
||||
"name": {
|
||||
"description": "The name of the filament, this is likely specfic to the manufacturer.",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"material"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"FilamentMaterial": {
|
||||
"description": "The material that the filament is made of.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Polylactic acid based plastics",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"pla"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Pla support",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"pla_support"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "acrylonitrile butadiene styrene based plastics",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"abs"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "polyethylene terephthalate glycol based plastics",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"petg"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "unsuprisingly, nylon based",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"nylon"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "thermoplastic polyurethane based urethane material",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"tpu"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "polyvinyl alcohol based material",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"pva"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "high impact polystyrene based material",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"hips"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "composite material with stuff in other stuff, something like PLA mixed with carbon fiber, kevlar, or fiberglass",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"composite"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
]
|
||||
},
|
||||
"HardwareConfiguration": {
|
||||
"description": "The hardware configuration of a machine.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "No configuration is possible. This isn't the same conceptually as an `Option<HardwareConfiguration>`, because this indicates we positively know there is no possible configuration changes that are possible with this method of manufcture.",
|
||||
"properties": {
|
||||
"type": {
|
||||
"enum": [
|
||||
"none"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Hardware configuration specific to FDM based printers",
|
||||
"properties": {
|
||||
"config": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/FdmHardwareConfiguration"
|
||||
}
|
||||
],
|
||||
"description": "The configuration for the FDM printer."
|
||||
},
|
||||
"type": {
|
||||
"enum": [
|
||||
"fdm"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"config",
|
||||
"type"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
@ -85,6 +344,14 @@
|
||||
"description": "Additional, per-machine information which is specific to the underlying machine type.",
|
||||
"nullable": true
|
||||
},
|
||||
"hardware_configuration": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/HardwareConfiguration"
|
||||
}
|
||||
],
|
||||
"description": "Information about how the Machine is currently configured."
|
||||
},
|
||||
"id": {
|
||||
"description": "Machine Identifier (ID) for the specific Machine.",
|
||||
"type": "string"
|
||||
@ -114,6 +381,12 @@
|
||||
"description": "Maximum part size that can be manufactured by this device. This may be some sort of theoretical upper bound, getting close to this limit seems like maybe a bad idea.\n\nThis may be `None` if the maximum size is not knowable by the Machine API.\n\nWhat \"close\" means is up to you!",
|
||||
"nullable": true
|
||||
},
|
||||
"progress": {
|
||||
"description": "Progress of the current print, if printing.",
|
||||
"format": "double",
|
||||
"nullable": true,
|
||||
"type": "number"
|
||||
},
|
||||
"state": {
|
||||
"allOf": [
|
||||
{
|
||||
@ -124,6 +397,7 @@
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"hardware_configuration",
|
||||
"id",
|
||||
"machine_type",
|
||||
"make_model",
|
||||
@ -157,57 +431,111 @@
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "If a print state can not be resolved at this time, an Unknown may be returned.",
|
||||
"enum": [
|
||||
"Unknown"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Idle, and ready for another job.",
|
||||
"enum": [
|
||||
"Idle"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Running a job -- 3D printing or CNC-ing a part.",
|
||||
"enum": [
|
||||
"Running"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Machine is currently offline or unreachable.",
|
||||
"enum": [
|
||||
"Offline"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Job is underway but halted, waiting for some action to take place.",
|
||||
"enum": [
|
||||
"Paused"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Job is finished, but waiting manual action to move back to Idle.",
|
||||
"enum": [
|
||||
"Complete"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"additionalProperties": false,
|
||||
"description": "The printer has failed and is in an unknown state that may require manual attention to resolve. The inner value is a human readable description of what specifically has failed.",
|
||||
"properties": {
|
||||
"Failed": {
|
||||
"nullable": true,
|
||||
"state": {
|
||||
"enum": [
|
||||
"unknown"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"Failed"
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Idle, and ready for another job.",
|
||||
"properties": {
|
||||
"state": {
|
||||
"enum": [
|
||||
"idle"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Running a job -- 3D printing or CNC-ing a part.",
|
||||
"properties": {
|
||||
"state": {
|
||||
"enum": [
|
||||
"running"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Machine is currently offline or unreachable.",
|
||||
"properties": {
|
||||
"state": {
|
||||
"enum": [
|
||||
"offline"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Job is underway but halted, waiting for some action to take place.",
|
||||
"properties": {
|
||||
"state": {
|
||||
"enum": [
|
||||
"paused"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "Job is finished, but waiting manual action to move back to Idle.",
|
||||
"properties": {
|
||||
"state": {
|
||||
"enum": [
|
||||
"complete"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
{
|
||||
"description": "The printer has failed and is in an unknown state that may require manual attention to resolve. The inner value is a human readable description of what specifically has failed.",
|
||||
"properties": {
|
||||
"message": {
|
||||
"description": "A human-readable message describing the failure.",
|
||||
"nullable": true,
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"enum": [
|
||||
"failed"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"state"
|
||||
],
|
||||
"type": "object"
|
||||
}
|
||||
@ -219,21 +547,54 @@
|
||||
{
|
||||
"description": "Use light to cure a resin to build up layers.",
|
||||
"enum": [
|
||||
"Stereolithography"
|
||||
"stereolithography"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Fused Deposition Modeling, layers of melted plastic.",
|
||||
"enum": [
|
||||
"FusedDeposition"
|
||||
"fused_deposition"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "\"Computer numerical control\" - machine that grinds away material from a hunk of material to construct a part.",
|
||||
"enum": [
|
||||
"Cnc"
|
||||
"cnc"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"NozzleDiameter": {
|
||||
"description": "A nozzle diameter.",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "0.2mm.",
|
||||
"enum": [
|
||||
"0.2"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "0.4mm.",
|
||||
"enum": [
|
||||
"0.4"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "0.6mm.",
|
||||
"enum": [
|
||||
"0.6"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "0.8mm.",
|
||||
"enum": [
|
||||
"0.8"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
@ -284,6 +645,15 @@
|
||||
"machine_id": {
|
||||
"description": "The machine id to print to.",
|
||||
"type": "string"
|
||||
},
|
||||
"slicer_configuration": {
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/SlicerConfiguration"
|
||||
}
|
||||
],
|
||||
"description": "Requested design-specific slicer configurations.",
|
||||
"nullable": true
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
@ -292,6 +662,283 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SlicerConfiguration": {
|
||||
"description": "The slicer configuration is a set of parameters that are passed to the slicer to control how the gcode is generated.",
|
||||
"properties": {
|
||||
"filament_idx": {
|
||||
"description": "The filament to use for the print.",
|
||||
"format": "uint",
|
||||
"minimum": 0,
|
||||
"nullable": true,
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"Stage": {
|
||||
"description": "The print stage. These come from: https://github.com/SoftFever/OrcaSlicer/blob/431978baf17961df90f0d01871b0ad1d839d7f5d/src/slic3r/GUI/DeviceManager.cpp#L78",
|
||||
"oneOf": [
|
||||
{
|
||||
"description": "Nothing.",
|
||||
"enum": [
|
||||
"nothing"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Empty.",
|
||||
"enum": [
|
||||
"empty"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Auto bed leveling.",
|
||||
"enum": [
|
||||
"auto_bed_leveling"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Heatbed preheating.",
|
||||
"enum": [
|
||||
"heatbed_preheating"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Sweeping XY mech mode.",
|
||||
"enum": [
|
||||
"sweeping_xy_mech_mode"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Changing filament.",
|
||||
"enum": [
|
||||
"changing_filament"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "M400 pause.",
|
||||
"enum": [
|
||||
"m400_pause"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused due to filament runout.",
|
||||
"enum": [
|
||||
"paused_due_to_filament_runout"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Heating hotend.",
|
||||
"enum": [
|
||||
"heating_hotend"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Calibrating extrusion.",
|
||||
"enum": [
|
||||
"calibrating_extrusion"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Scanning bed surface.",
|
||||
"enum": [
|
||||
"scanning_bed_surface"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Inspecting first layer.",
|
||||
"enum": [
|
||||
"inspecting_first_layer"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Identifying build plate type.",
|
||||
"enum": [
|
||||
"identifying_build_plate_type"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Calibrating micro lidar.",
|
||||
"enum": [
|
||||
"calibrating_micro_lidar"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Homing toolhead.",
|
||||
"enum": [
|
||||
"homing_toolhead"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Cleaning nozzle tip.",
|
||||
"enum": [
|
||||
"cleaning_nozzle_tip"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Checking extruder temperature.",
|
||||
"enum": [
|
||||
"checking_extruder_temperature"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Printing was paused by the user.",
|
||||
"enum": [
|
||||
"printing_was_paused_by_the_user"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Pause of front cover falling.",
|
||||
"enum": [
|
||||
"pause_of_front_cover_falling"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Calibrating micro lidar.",
|
||||
"enum": [
|
||||
"calibrating_micro_lidar2"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Calibrating extrusion flow.",
|
||||
"enum": [
|
||||
"calibrating_extrusion_flow"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused due to nozzle temperature malfunction.",
|
||||
"enum": [
|
||||
"paused_due_to_nozzle_temperature_malfunction"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused due to heat bed temperature malfunction.",
|
||||
"enum": [
|
||||
"paused_due_to_heat_bed_temperature_malfunction"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Filament unloading.",
|
||||
"enum": [
|
||||
"filament_unloading"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Skip step pause.",
|
||||
"enum": [
|
||||
"skip_step_pause"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Filament loading.",
|
||||
"enum": [
|
||||
"filament_loading"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Motor noise calibration.",
|
||||
"enum": [
|
||||
"motor_noise_calibration"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused due to AMS lost.",
|
||||
"enum": [
|
||||
"paused_due_to_ams_lost"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused due to low speed of the heat break fan.",
|
||||
"enum": [
|
||||
"paused_due_to_low_speed_of_the_heat_break_fan"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused due to chamber temperature control error.",
|
||||
"enum": [
|
||||
"paused_due_to_chamber_temperature_control_error"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Cooling chamber.",
|
||||
"enum": [
|
||||
"cooling_chamber"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Paused by the Gcode inserted by the user.",
|
||||
"enum": [
|
||||
"paused_by_the_gcode_inserted_by_the_user"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Motor noise showoff.",
|
||||
"enum": [
|
||||
"motor_noise_showoff"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Nozzle filament covered detected pause.",
|
||||
"enum": [
|
||||
"nozzle_filament_covered_detected_pause"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Cutter error pause.",
|
||||
"enum": [
|
||||
"cutter_error_pause"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "First layer error pause.",
|
||||
"enum": [
|
||||
"first_layer_error_pause"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"description": "Nozzle clog pause.",
|
||||
"enum": [
|
||||
"nozzle_clog_pause"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
},
|
||||
"Volume": {
|
||||
"description": "Set of three values to represent the extent of a 3-D Volume. This contains the width, depth, and height values, generally used to represent some maximum or minimum.\n\nAll measurements are in millimeters.",
|
||||
"properties": {
|
||||
|
@@ -1,6 +1,6 @@
{
"name": "zoo-modeling-app",
"version": "0.25.6",
"version": "0.26.0",
"private": true,
"productName": "Zoo Modeling App",
"author": {
@ -64,6 +64,27 @@ export type ReactCameraProperties =
|
||||
|
||||
const lastCmdDelay = 50
|
||||
|
||||
class CameraRateLimiter {
|
||||
lastSend?: Date = undefined
|
||||
rateLimitMs: number = 16 //60 FPS
|
||||
|
||||
send = (f: () => void) => {
|
||||
let now = new Date()
|
||||
|
||||
if (
|
||||
this.lastSend === undefined ||
|
||||
now.getTime() - this.lastSend.getTime() > this.rateLimitMs
|
||||
) {
|
||||
f()
|
||||
this.lastSend = now
|
||||
}
|
||||
}
|
||||
|
||||
reset = () => {
|
||||
this.lastSend = undefined
|
||||
}
|
||||
}
|
||||
|
||||
export class CameraControls {
|
||||
engineCommandManager: EngineCommandManager
|
||||
syncDirection: 'clientToEngine' | 'engineToClient' = 'engineToClient'
|
||||
@ -77,9 +98,8 @@ export class CameraControls {
|
||||
enableRotate = true
|
||||
enablePan = true
|
||||
enableZoom = true
|
||||
zoomDataFromLastFrame?: number = undefined
|
||||
// holds coordinates, and interaction
|
||||
moveDataFromLastFrame?: [number, number, string] = undefined
|
||||
moveSender: CameraRateLimiter = new CameraRateLimiter()
|
||||
zoomSender: CameraRateLimiter = new CameraRateLimiter()
|
||||
lastPerspectiveFov: number = 45
|
||||
pendingZoom: number | null = null
|
||||
pendingRotation: Vector2 | null = null
|
||||
@ -171,6 +191,36 @@ export class CameraControls {
|
||||
}
|
||||
}
|
||||
|
||||
doMove = (interaction: any, coordinates: any) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.engineCommandManager.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd: {
|
||||
type: 'camera_drag_move',
|
||||
interaction: interaction,
|
||||
window: {
|
||||
x: coordinates[0],
|
||||
y: coordinates[1],
|
||||
},
|
||||
},
|
||||
cmd_id: uuidv4(),
|
||||
})
|
||||
}
|
||||
|
||||
doZoom = (zoom: number) => {
|
||||
this.handleStart()
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.engineCommandManager.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd: {
|
||||
type: 'default_camera_zoom',
|
||||
magnitude: (-1 * zoom) / window.devicePixelRatio,
|
||||
},
|
||||
cmd_id: uuidv4(),
|
||||
})
|
||||
this.handleEnd()
|
||||
}
|
||||
|
||||
constructor(
|
||||
isOrtho = false,
|
||||
domElement: HTMLCanvasElement,
|
||||
@ -258,49 +308,6 @@ export class CameraControls {
|
||||
this.onCameraChange()
|
||||
}
|
||||
|
||||
// Our stream is never more than 60fps.
|
||||
// We can get away with capping our "virtual fps" to 60 then.
|
||||
const FPS_VIRTUAL = 60
|
||||
|
||||
const doZoom = () => {
|
||||
if (this.zoomDataFromLastFrame !== undefined) {
|
||||
this.handleStart()
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.engineCommandManager.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd: {
|
||||
type: 'default_camera_zoom',
|
||||
magnitude:
|
||||
(-1 * this.zoomDataFromLastFrame) / window.devicePixelRatio,
|
||||
},
|
||||
cmd_id: uuidv4(),
|
||||
})
|
||||
this.handleEnd()
|
||||
}
|
||||
this.zoomDataFromLastFrame = undefined
|
||||
}
|
||||
setInterval(doZoom, 1000 / FPS_VIRTUAL)
|
||||
|
||||
const doMove = () => {
|
||||
if (this.moveDataFromLastFrame !== undefined) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-floating-promises
|
||||
this.engineCommandManager.sendSceneCommand({
|
||||
type: 'modeling_cmd_req',
|
||||
cmd: {
|
||||
type: 'camera_drag_move',
|
||||
interaction: this.moveDataFromLastFrame[2] as any,
|
||||
window: {
|
||||
x: this.moveDataFromLastFrame[0],
|
||||
y: this.moveDataFromLastFrame[1],
|
||||
},
|
||||
},
|
||||
cmd_id: uuidv4(),
|
||||
})
|
||||
}
|
||||
this.moveDataFromLastFrame = undefined
|
||||
}
|
||||
setInterval(doMove, 1000 / FPS_VIRTUAL)
|
||||
|
||||
setTimeout(() => {
|
||||
this.engineCommandManager.subscribeTo({
|
||||
event: 'camera_drag_end',
|
||||
@ -386,7 +393,9 @@ export class CameraControls {
|
||||
if (interaction === 'none') return
|
||||
|
||||
if (this.syncDirection === 'engineToClient') {
|
||||
this.moveDataFromLastFrame = [event.clientX, event.clientY, interaction]
|
||||
this.moveSender.send(() => {
|
||||
this.doMove(interaction, [event.clientX, event.clientY])
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
@ -459,7 +468,9 @@ export class CameraControls {
|
||||
|
||||
if (this.syncDirection === 'engineToClient') {
|
||||
if (interaction === 'zoom') {
|
||||
this.zoomDataFromLastFrame = event.deltaY
|
||||
this.zoomSender.send(() => {
|
||||
this.doZoom(event.deltaY)
|
||||
})
|
||||
} else {
|
||||
// This case will get handled when we add pan and rotate using Apple trackpad.
|
||||
console.error(
|
||||
|
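The new CameraRateLimiter only forwards a call when more than rateLimitMs (16 ms, roughly one frame at 60 FPS) has passed since the previous send; events arriving faster than that are dropped rather than queued. A standalone sketch of the same throttling idea, with a logging callback standing in for the real engine command (illustrative only, not part of this change):

```typescript
// Minimal sketch of the throttling pattern used by CameraRateLimiter:
// invoke the callback at most once per rateLimitMs; drop calls in between.
class RateLimiter {
  private lastSend?: Date = undefined
  constructor(private rateLimitMs: number = 16) {} // ~60 FPS

  send(f: () => void) {
    const now = new Date()
    if (
      this.lastSend === undefined ||
      now.getTime() - this.lastSend.getTime() > this.rateLimitMs
    ) {
      f()
      this.lastSend = now
    }
  }

  reset() {
    this.lastSend = undefined
  }
}

// Hypothetical usage: wheel events arrive faster than 60 Hz, but roughly
// one per frame reaches the (illustrative) logging callback.
const zoomSender = new RateLimiter()
window.addEventListener('wheel', (event) => {
  zoomSender.send(() => console.log('sendZoom', event.deltaY))
})
```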
@ -135,7 +135,9 @@ function CommandArgOptionInput({
|
||||
<Combobox.Input
|
||||
id="option-input"
|
||||
ref={inputRef}
|
||||
onChange={(event) => setQuery(event.target.value)}
|
||||
onChange={(event) =>
|
||||
!event.target.disabled && setQuery(event.target.value)
|
||||
}
|
||||
className="flex-grow px-2 py-1 border-b border-b-chalkboard-100 dark:border-b-chalkboard-80 !bg-transparent focus:outline-none"
|
||||
onKeyDown={(event) => {
|
||||
if (event.metaKey && event.key === 'k')
|
||||
@ -175,9 +177,18 @@ function CommandArgOptionInput({
|
||||
<Combobox.Option
|
||||
key={option.name}
|
||||
value={option}
|
||||
disabled={option.disabled}
|
||||
className="flex items-center gap-2 px-4 py-1 first:mt-2 last:mb-2 ui-active:bg-primary/10 dark:ui-active:bg-chalkboard-90"
|
||||
>
|
||||
<p className="flex-grow">{option.name} </p>
|
||||
<p
|
||||
className={`flex-grow ${
|
||||
(option.disabled &&
|
||||
'text-chalkboard-70 dark:text-chalkboard-50 cursor-not-allowed') ||
|
||||
''
|
||||
}`}
|
||||
>
|
||||
{option.name}
|
||||
</p>
|
||||
{option.value === currentOption?.value && (
|
||||
<small className="text-chalkboard-70 dark:text-chalkboard-50">
|
||||
current
|
||||
|
@ -2,7 +2,7 @@ import type { IndexLoaderData } from 'lib/types'
|
||||
import { PATHS } from 'lib/paths'
|
||||
import { ActionButton } from './ActionButton'
|
||||
import Tooltip from './Tooltip'
|
||||
import { Dispatch, useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { Dispatch, useCallback, useRef, useState } from 'react'
|
||||
import { useNavigate, useRouteLoaderData } from 'react-router-dom'
|
||||
import { Disclosure } from '@headlessui/react'
|
||||
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
|
||||
@ -13,7 +13,6 @@ import { sortProject } from 'lib/desktopFS'
|
||||
import { FILE_EXT } from 'lib/constants'
|
||||
import { CustomIcon } from './CustomIcon'
|
||||
import { codeManager, kclManager } from 'lib/singletons'
|
||||
import { useDocumentHasFocus } from 'hooks/useDocumentHasFocus'
|
||||
import { useLspContext } from './LspProvider'
|
||||
import useHotkeyWrapper from 'lib/hotkeyWrapper'
|
||||
import { useModelingContext } from 'hooks/useModelingContext'
|
||||
@ -21,6 +20,8 @@ import { DeleteConfirmationDialog } from './ProjectCard/DeleteProjectDialog'
|
||||
import { ContextMenu, ContextMenuItem } from './ContextMenu'
|
||||
import usePlatform from 'hooks/usePlatform'
|
||||
import { FileEntry } from 'lib/project'
|
||||
import { useFileSystemWatcher } from 'hooks/useFileSystemWatcher'
|
||||
import { normalizeLineEndings } from 'lib/codeEditor'
|
||||
|
||||
function getIndentationCSS(level: number) {
|
||||
return `calc(1rem * ${level + 1})`
|
||||
@ -131,6 +132,23 @@ const FileTreeItem = ({
|
||||
const isCurrentFile = fileOrDir.path === currentFile?.path
|
||||
const itemRef = useRef(null)
|
||||
|
||||
// Since every file or directory gets its own FileTreeItem, we can do this.
|
||||
// Because subtrees only render when they are opened, that means this
|
||||
// only listens when they open. Because this acts like a useEffect, when
|
||||
// the ReactNodes are destroyed, so is this listener :)
|
||||
useFileSystemWatcher(
|
||||
async (eventType, path) => {
|
||||
// Don't try to read a file that was removed.
|
||||
if (isCurrentFile && eventType !== 'unlink') {
|
||||
let code = await window.electron.readFile(path, { encoding: 'utf-8' })
|
||||
code = normalizeLineEndings(code)
|
||||
codeManager.updateCodeStateEditor(code)
|
||||
}
|
||||
fileSend({ type: 'Refresh' })
|
||||
},
|
||||
[fileOrDir.path]
|
||||
)
|
||||
|
||||
const isRenaming = fileContext.itemsBeingRenamed.includes(fileOrDir.path)
|
||||
const removeCurrentItemFromRenaming = useCallback(
|
||||
() =>
|
||||
@ -154,6 +172,13 @@ const FileTreeItem = ({
|
||||
})
|
||||
}, [fileContext.itemsBeingRenamed, fileOrDir.path, fileSend])
|
||||
|
||||
const clickDirectory = () => {
|
||||
fileSend({
|
||||
type: 'Set selected directory',
|
||||
directory: fileOrDir,
|
||||
})
|
||||
}
|
||||
|
||||
function handleKeyUp(e: React.KeyboardEvent<HTMLButtonElement>) {
|
||||
if (e.metaKey && e.key === 'Backspace') {
|
||||
// Open confirmation dialog
|
||||
@ -242,18 +267,8 @@ const FileTreeItem = ({
|
||||
}
|
||||
style={{ paddingInlineStart: getIndentationCSS(level) }}
|
||||
onClick={(e) => e.currentTarget.focus()}
|
||||
onClickCapture={(e) =>
|
||||
fileSend({
|
||||
type: 'Set selected directory',
|
||||
directory: fileOrDir,
|
||||
})
|
||||
}
|
||||
onFocusCapture={(e) =>
|
||||
fileSend({
|
||||
type: 'Set selected directory',
|
||||
directory: fileOrDir,
|
||||
})
|
||||
}
|
||||
onClickCapture={clickDirectory}
|
||||
onFocusCapture={clickDirectory}
|
||||
onKeyDown={(e) => e.key === 'Enter' && e.preventDefault()}
|
||||
onKeyUp={handleKeyUp}
|
||||
>
|
||||
@ -469,27 +484,36 @@ export const FileTreeInner = ({
|
||||
const loaderData = useRouteLoaderData(PATHS.FILE) as IndexLoaderData
|
||||
const { send: fileSend, context: fileContext } = useFileContext()
|
||||
const { send: modelingSend } = useModelingContext()
|
||||
const documentHasFocus = useDocumentHasFocus()
|
||||
|
||||
// Refresh the file tree when the document gets focus
|
||||
useEffect(() => {
|
||||
fileSend({ type: 'Refresh' })
|
||||
}, [documentHasFocus])
|
||||
// Refresh the file tree when there are changes.
|
||||
useFileSystemWatcher(
|
||||
async (eventType, path) => {
|
||||
// Our other watcher races with this watcher on the current file changes,
|
||||
// so we need to stop this one from reacting at all, otherwise Bad Things
|
||||
// Happen™.
|
||||
const isCurrentFile = loaderData.file?.path === path
|
||||
const hasChanged = eventType === 'change'
|
||||
if (isCurrentFile && hasChanged) return
|
||||
fileSend({ type: 'Refresh' })
|
||||
},
|
||||
[loaderData?.project?.path, fileContext.selectedDirectory.path].filter(
|
||||
(x: string | undefined) => x !== undefined
|
||||
)
|
||||
)
|
||||
|
||||
const clickDirectory = () => {
|
||||
fileSend({
|
||||
type: 'Set selected directory',
|
||||
directory: fileContext.project,
|
||||
})
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
className="overflow-auto pb-12 absolute inset-0"
|
||||
data-testid="file-pane-scroll-container"
|
||||
>
|
||||
<ul
|
||||
className="m-0 p-0 text-sm"
|
||||
onClickCapture={(e) => {
|
||||
fileSend({
|
||||
type: 'Set selected directory',
|
||||
directory: fileContext.project,
|
||||
})
|
||||
}}
|
||||
>
|
||||
<ul className="m-0 p-0 text-sm" onClickCapture={clickDirectory}>
|
||||
{sortProject(fileContext.project?.children || []).map((fileOrDir) => (
|
||||
<FileTreeItem
|
||||
project={fileContext.project}
|
||||
|
@ -69,7 +69,7 @@ import { exportFromEngine } from 'lib/exportFromEngine'
|
||||
import { Models } from '@kittycad/lib/dist/types/src'
|
||||
import toast from 'react-hot-toast'
|
||||
import { EditorSelection, Transaction } from '@codemirror/state'
|
||||
import { useNavigate, useSearchParams } from 'react-router-dom'
|
||||
import { useLoaderData, useNavigate, useSearchParams } from 'react-router-dom'
|
||||
import { letEngineAnimateAndSyncCamAfter } from 'clientSideScene/CameraControls'
|
||||
import { getVarNameModal } from 'hooks/useToolbarGuards'
|
||||
import { err, reportRejection, trap } from 'lib/trap'
|
||||
@ -84,6 +84,7 @@ import {
|
||||
import { submitAndAwaitTextToKcl } from 'lib/textToCad'
|
||||
import { useFileContext } from 'hooks/useFileContext'
|
||||
import { uuidv4 } from 'lib/utils'
|
||||
import { IndexLoaderData } from 'lib/types'
|
||||
|
||||
type MachineContext<T extends AnyStateMachine> = {
|
||||
state: StateFrom<T>
|
||||
@ -116,6 +117,7 @@ export const ModelingMachineProvider = ({
|
||||
} = useSettingsAuthContext()
|
||||
const navigate = useNavigate()
|
||||
const { context, send: fileMachineSend } = useFileContext()
|
||||
const { file } = useLoaderData() as IndexLoaderData
|
||||
const token = auth?.context?.token
|
||||
const streamRef = useRef<HTMLDivElement>(null)
|
||||
const persistedContext = useMemo(() => getPersistedContext(), [])
|
||||
@ -409,12 +411,15 @@ export const ModelingMachineProvider = ({
|
||||
Make: ({ event }) => {
|
||||
if (event.type !== 'Make') return
|
||||
// Check if we already have an export intent.
|
||||
if (engineCommandManager.exportIntent) {
|
||||
if (engineCommandManager.exportInfo) {
|
||||
toast.error('Already exporting')
|
||||
return
|
||||
}
|
||||
// Set the export intent.
|
||||
engineCommandManager.exportIntent = ExportIntent.Make
|
||||
engineCommandManager.exportInfo = {
|
||||
intent: ExportIntent.Make,
|
||||
name: file?.name || '',
|
||||
}
|
||||
|
||||
// Set the current machine.
|
||||
machineManager.currentMachine = event.data.machine
|
||||
@ -443,12 +448,16 @@ export const ModelingMachineProvider = ({
|
||||
},
|
||||
'Engine export': ({ event }) => {
|
||||
if (event.type !== 'Export') return
|
||||
if (engineCommandManager.exportIntent) {
|
||||
if (engineCommandManager.exportInfo) {
|
||||
toast.error('Already exporting')
|
||||
return
|
||||
}
|
||||
// Set the export intent.
|
||||
engineCommandManager.exportIntent = ExportIntent.Save
|
||||
engineCommandManager.exportInfo = {
|
||||
intent: ExportIntent.Save,
|
||||
// This never gets used its only for make.
|
||||
name: '',
|
||||
}
|
||||
|
||||
const format = {
|
||||
...event.data,
|
||||
|
@ -11,6 +11,7 @@ export const NetworkMachineIndicator = ({
|
||||
}) => {
|
||||
const machineCount = machineManager.machineCount()
|
||||
const reason = machineManager.noMachinesReason()
|
||||
const machines = machineManager.machines
|
||||
|
||||
return isDesktop() ? (
|
||||
<Popover className="relative">
|
||||
@ -46,20 +47,34 @@ export const NetworkMachineIndicator = ({
|
||||
</div>
|
||||
{machineCount > 0 && (
|
||||
<ul className="divide-y divide-chalkboard-20 dark:divide-chalkboard-80">
|
||||
{Object.entries(machineManager.machines).map(
|
||||
([hostname, machine]) => (
|
||||
<li key={hostname} className={'px-2 py-4 gap-1 last:mb-0 '}>
|
||||
<p className="">
|
||||
{machine.make_model.model ||
|
||||
machine.make_model.manufacturer ||
|
||||
'Unknown Machine'}
|
||||
</p>
|
||||
{machines.map((machine) => {
|
||||
return (
|
||||
<li key={machine.id} className={'px-2 py-4 gap-1 last:mb-0 '}>
|
||||
<p className="">{machine.id.toUpperCase()}</p>
|
||||
<p className="text-chalkboard-60 dark:text-chalkboard-50 text-xs">
|
||||
Hostname {hostname}
|
||||
{machine.make_model.model}
|
||||
</p>
|
||||
{machine.extra &&
|
||||
machine.extra.type === 'bambu' &&
|
||||
machine.extra.nozzle_diameter && (
|
||||
<p className="text-chalkboard-60 dark:text-chalkboard-50 text-xs">
|
||||
Nozzle Diameter: {machine.extra.nozzle_diameter}
|
||||
</p>
|
||||
)}
|
||||
<p className="text-chalkboard-60 dark:text-chalkboard-50 text-xs">
|
||||
{`Status: ${machine.state.state
|
||||
.charAt(0)
|
||||
.toUpperCase()}${machine.state.state.slice(1)}`}
|
||||
{machine.state.state === 'failed' && machine.state.message
|
||||
? ` (${machine.state.message})`
|
||||
: ''}
|
||||
{machine.state.state === 'running' && machine.progress
|
||||
? ` (${Math.round(machine.progress)}%)`
|
||||
: ''}
|
||||
</p>
|
||||
</li>
|
||||
)
|
||||
)}
|
||||
})}
|
||||
</ul>
|
||||
)}
|
||||
</Popover.Panel>
|
||||
|
@ -221,6 +221,19 @@ export const SettingsAuthProviderBase = ({

useFileSystemWatcher(
async () => {
// If there is a projectPath but it no longer exists it means
// it was externally removed. If we let the code past this condition
// execute it will recreate the directory due to code in
// loadAndValidateSettings trying to recreate files. I do not
// wish to change the behavior in case anything else uses it.
// Go home.
if (loadedProject?.project?.path) {
if (!window.electron.exists(loadedProject?.project?.path)) {
navigate(PATHS.HOME)
return
}
}

const data = await loadAndValidateSettings(loadedProject?.project?.path)
settingsSend({
type: 'Set all settings',
@ -228,7 +241,9 @@
doNotPersist: true,
})
},
settingsPath ? [settingsPath] : []
[settingsPath, loadedProject?.project?.path].filter(
(x: string | undefined) => x !== undefined
)
)

// Add settings commands to the command bar
|
||||
nil: t.null,
|
||||
'AddOp MultOp ExpOp': t.arithmeticOperator,
|
||||
BangOp: t.logicOperator,
|
||||
CompOp: t.logicOperator,
|
||||
CompOp: t.compareOperator,
|
||||
'Equals Arrow': t.definitionOperator,
|
||||
PipeOperator: t.controlOperator,
|
||||
String: t.string,
|
||||
|
@ -90,7 +90,7 @@ commaSep1NoTrailingComma<term> { term ("," term)* }
|
||||
MultOp { "/" | "*" | "\\" }
|
||||
ExpOp { "^" }
|
||||
BangOp { "!" }
|
||||
CompOp { $[<>] "="? | "!=" | "==" }
|
||||
CompOp { "==" | "!=" | "<=" | ">=" | "<" | ">" }
|
||||
Equals { "=" }
|
||||
Arrow { "=>" }
|
||||
PipeOperator { "|>" }
|
||||
|
@ -12,35 +12,51 @@ type Path = string
|
||||
// watcher.addListener(() => { ... }).
|
||||
|
||||
export const useFileSystemWatcher = (
|
||||
callback: (path: Path) => Promise<void>,
|
||||
dependencyArray: Path[]
|
||||
callback: (eventType: string, path: Path) => Promise<void>,
|
||||
paths: Path[]
|
||||
): void => {
|
||||
// Track a ref to the callback. This is how we get the callback updated
|
||||
// across the NodeJS<->Browser boundary.
|
||||
const callbackRef = useRef<{ fn: (path: Path) => Promise<void> }>({
|
||||
fn: async (_path) => {},
|
||||
})
|
||||
// Used to track this instance of useFileSystemWatcher.
|
||||
// Assign to ref so it doesn't change between renders.
|
||||
const key = useRef(Math.random().toString())
|
||||
|
||||
const [output, setOutput] = useState<
|
||||
{ eventType: string; path: string } | undefined
|
||||
>(undefined)
|
||||
|
||||
// Used to track if paths list changes.
|
||||
const [pathsTracked, setPathsTracked] = useState<Path[]>([])
|
||||
|
||||
useEffect(() => {
|
||||
callbackRef.current.fn = callback
|
||||
}, [callback])
|
||||
|
||||
// Used to track if dependencyArrray changes.
|
||||
const [dependencyArrayTracked, setDependencyArrayTracked] = useState<Path[]>(
|
||||
[]
|
||||
)
|
||||
if (!output) return
|
||||
callback(output.eventType, output.path).catch(reportRejection)
|
||||
}, [output])
|
||||
|
||||
// On component teardown obliterate all watchers.
|
||||
useEffect(() => {
|
||||
// The hook is useless on web.
|
||||
if (!isDesktop()) return
|
||||
|
||||
const cbWatcher = (eventType: string, path: string) => {
|
||||
setOutput({ eventType, path })
|
||||
}
|
||||
|
||||
for (let path of pathsTracked) {
|
||||
// Because functions don't retain refs between NodeJS-Browser I need to
|
||||
// pass an identifying key so we can later remove it.
|
||||
// A way to think of the function call is:
|
||||
// "For this path, add a new handler with this key"
|
||||
// "There can be many keys (functions) per path"
|
||||
// Again if refs were preserved, we wouldn't need to do this. Keys
|
||||
// gives us uniqueness.
|
||||
window.electron.watchFileOn(path, key.current, cbWatcher)
|
||||
}
|
||||
|
||||
return () => {
|
||||
for (let path of dependencyArray) {
|
||||
window.electron.watchFileOff(path)
|
||||
for (let path of pathsTracked) {
|
||||
window.electron.watchFileOff(path, key.current)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
}, [pathsTracked])
|
||||
|
||||
function difference<T>(l1: T[], l2: T[]): [T[], T[]] {
|
||||
return [
|
||||
@ -49,8 +65,7 @@ export const useFileSystemWatcher = (
|
||||
]
|
||||
}
|
||||
|
||||
const hasDiff =
|
||||
difference(dependencyArray, dependencyArrayTracked)[0].length !== 0
|
||||
const hasDiff = difference(paths, pathsTracked)[0].length !== 0
|
||||
|
||||
// Removing 1 watcher at a time is only possible because in a filesystem,
|
||||
// a path is unique (there can never be two paths with the same name).
|
||||
@ -61,19 +76,8 @@ export const useFileSystemWatcher = (
|
||||
|
||||
if (!hasDiff) return
|
||||
|
||||
const [pathsRemoved, pathsRemaining] = difference(
|
||||
dependencyArrayTracked,
|
||||
dependencyArray
|
||||
)
|
||||
for (let path of pathsRemoved) {
|
||||
window.electron.watchFileOff(path)
|
||||
}
|
||||
const [pathsAdded] = difference(dependencyArray, dependencyArrayTracked)
|
||||
for (let path of pathsAdded) {
|
||||
window.electron.watchFileOn(path, (_eventType: string, path: Path) => {
|
||||
callbackRef.current.fn(path).catch(reportRejection)
|
||||
})
|
||||
}
|
||||
setDependencyArrayTracked(pathsRemaining.concat(pathsAdded))
|
||||
const [, pathsRemaining] = difference(pathsTracked, paths)
|
||||
const [pathsAdded] = difference(paths, pathsTracked)
|
||||
setPathsTracked(pathsRemaining.concat(pathsAdded))
|
||||
}, [hasDiff])
|
||||
}
|
||||
|
@ -40,9 +40,7 @@ export class KclManager {
|
||||
nonCodeMeta: {
|
||||
nonCodeNodes: {},
|
||||
start: [],
|
||||
digest: null,
|
||||
},
|
||||
digest: null,
|
||||
}
|
||||
private _execState: ExecState = emptyExecState()
|
||||
private _programMemory: ProgramMemory = ProgramMemory.empty()
|
||||
@ -208,9 +206,7 @@ export class KclManager {
|
||||
nonCodeMeta: {
|
||||
nonCodeNodes: {},
|
||||
start: [],
|
||||
digest: null,
|
||||
},
|
||||
digest: null,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -172,7 +172,6 @@ const sk2 = startSketchOn('XY')
|
||||
start: 114,
|
||||
type: 'TagDeclarator',
|
||||
value: 'p',
|
||||
digest: null,
|
||||
},
|
||||
id: expect.any(String),
|
||||
sourceRange: [95, 117],
|
||||
@ -223,7 +222,6 @@ const sk2 = startSketchOn('XY')
|
||||
start: 114,
|
||||
type: 'TagDeclarator',
|
||||
value: 'p',
|
||||
digest: null,
|
||||
},
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
@ -266,7 +264,6 @@ const sk2 = startSketchOn('XY')
|
||||
start: 417,
|
||||
type: 'TagDeclarator',
|
||||
value: 'o',
|
||||
digest: null,
|
||||
},
|
||||
id: expect.any(String),
|
||||
sourceRange: [399, 420],
|
||||
@ -317,7 +314,6 @@ const sk2 = startSketchOn('XY')
|
||||
start: 417,
|
||||
type: 'TagDeclarator',
|
||||
value: 'o',
|
||||
digest: null,
|
||||
},
|
||||
__geoMeta: {
|
||||
id: expect.any(String),
|
||||
|
@ -18,6 +18,7 @@ export default class CodeManager {
#updateState: (arg: string) => void = () => {}
private _currentFilePath: string | null = null
private _hotkeys: { [key: string]: () => void } = {}
private timeoutWriter: ReturnType<typeof setTimeout> | undefined = undefined

constructor() {
if (isDesktop()) {
@ -115,7 +116,11 @@

async writeToFile() {
if (isDesktop()) {
setTimeout(() => {
// Only write our buffer contents to file once per second. Any faster
// and file-system watchers which read, will receive empty data during
// writes.
clearTimeout(this.timeoutWriter)
this.timeoutWriter = setTimeout(() => {
// Wait one event loop to give a chance for params to be set
// Save the file to disk
this._currentFilePath &&
@ -126,7 +131,7 @@
console.error('error saving file', err)
toast.error('Error saving file, please check file permissions')
})
})
}, 1000)
} else {
safeLSSetItem(PERSIST_CODE_KEY, this.code)
}
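The clearTimeout/setTimeout pair above is a debounce: every call to writeToFile() cancels any pending write and schedules a new one, so at most one disk write happens per second of editing and file watchers never observe a half-written buffer. The same pattern in isolation; writeFn and the one-second delay are illustrative, not part of this diff:

```typescript
// Generic debounce of the kind used by writeToFile: only the last call
// inside the quiet window actually runs.
function makeDebouncedWriter(writeFn: () => Promise<void>, delayMs = 1000) {
  let pending: ReturnType<typeof setTimeout> | undefined
  return () => {
    clearTimeout(pending)
    pending = setTimeout(() => {
      writeFn().catch((err) => console.error('error saving file', err))
    }, delayMs)
  }
}

// Illustrative usage: rapid edits collapse into a single disk write.
const save = makeDebouncedWriter(async () => console.log('written'))
save()
save()
save() // only this call fires, ~1s later
```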
@ -73,7 +73,6 @@ const newVar = myVar + 1`
|
||||
start: 89,
|
||||
type: 'TagDeclarator',
|
||||
value: 'myPath',
|
||||
digest: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -99,7 +98,6 @@ const newVar = myVar + 1`
|
||||
start: 143,
|
||||
type: 'TagDeclarator',
|
||||
value: 'rightPath',
|
||||
digest: null,
|
||||
},
|
||||
},
|
||||
])
|
||||
@ -201,7 +199,6 @@ const newVar = myVar + 1`
|
||||
start: 109,
|
||||
type: 'TagDeclarator',
|
||||
value: 'myPath',
|
||||
digest: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
|
@ -100,15 +100,15 @@ describe('Testing findUniqueName', () => {
|
||||
it('should find a unique name', () => {
|
||||
const result = findUniqueName(
|
||||
JSON.stringify([
|
||||
{ type: 'Identifier', name: 'yo01', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo02', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo03', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo04', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo05', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo06', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo07', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo08', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo09', start: 0, end: 0, digest: null },
|
||||
{ type: 'Identifier', name: 'yo01', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo02', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo03', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo04', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo05', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo06', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo07', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo08', start: 0, end: 0 },
|
||||
{ type: 'Identifier', name: 'yo09', start: 0, end: 0 },
|
||||
] satisfies Identifier[]),
|
||||
'yo',
|
||||
2
|
||||
@ -123,8 +123,7 @@ describe('Testing addSketchTo', () => {
|
||||
body: [],
|
||||
start: 0,
|
||||
end: 0,
|
||||
nonCodeMeta: { nonCodeNodes: {}, start: [], digest: null },
|
||||
digest: null,
|
||||
nonCodeMeta: { nonCodeNodes: {}, start: [] },
|
||||
},
|
||||
'yz'
|
||||
)
|
||||
|
@ -241,7 +241,6 @@ export function mutateObjExpProp(
|
||||
value: updateWith,
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -579,7 +578,6 @@ export function createLiteral(value: string | number): Literal {
|
||||
end: 0,
|
||||
value,
|
||||
raw: `${value}`,
|
||||
digest: null,
|
||||
}
|
||||
}
|
||||
|
||||
@ -588,7 +586,7 @@ export function createTagDeclarator(value: string): TagDeclarator {
|
||||
type: 'TagDeclarator',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
value,
|
||||
}
|
||||
}
|
||||
@ -598,7 +596,7 @@ export function createIdentifier(name: string): Identifier {
|
||||
type: 'Identifier',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
name,
|
||||
}
|
||||
}
|
||||
@ -608,7 +606,6 @@ export function createPipeSubstitution(): PipeSubstitution {
|
||||
type: 'PipeSubstitution',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
}
|
||||
}
|
||||
|
||||
@ -624,12 +621,11 @@ export function createCallExpressionStdLib(
|
||||
type: 'Identifier',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
name,
|
||||
},
|
||||
optional: false,
|
||||
arguments: args,
|
||||
digest: null,
|
||||
}
|
||||
}
|
||||
|
||||
@ -645,12 +641,11 @@ export function createCallExpression(
|
||||
type: 'Identifier',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
name,
|
||||
},
|
||||
optional: false,
|
||||
arguments: args,
|
||||
digest: null,
|
||||
}
|
||||
}
|
||||
|
||||
@ -661,7 +656,7 @@ export function createArrayExpression(
|
||||
type: 'ArrayExpression',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
nonCodeMeta: nonCodeMetaEmpty(),
|
||||
elements,
|
||||
}
|
||||
@ -674,7 +669,7 @@ export function createPipeExpression(
|
||||
type: 'PipeExpression',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
body,
|
||||
nonCodeMeta: nonCodeMetaEmpty(),
|
||||
}
|
||||
@ -690,13 +685,13 @@ export function createVariableDeclaration(
|
||||
type: 'VariableDeclaration',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
declarations: [
|
||||
{
|
||||
type: 'VariableDeclarator',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
id: createIdentifier(varName),
|
||||
init,
|
||||
},
|
||||
@ -713,14 +708,14 @@ export function createObjectExpression(properties: {
|
||||
type: 'ObjectExpression',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
nonCodeMeta: nonCodeMetaEmpty(),
|
||||
properties: Object.entries(properties).map(([key, value]) => ({
|
||||
type: 'ObjectProperty',
|
||||
start: 0,
|
||||
end: 0,
|
||||
key: createIdentifier(key),
|
||||
digest: null,
|
||||
|
||||
value,
|
||||
})),
|
||||
}
|
||||
@ -734,7 +729,7 @@ export function createUnaryExpression(
|
||||
type: 'UnaryExpression',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
operator,
|
||||
argument,
|
||||
}
|
||||
@ -749,7 +744,7 @@ export function createBinaryExpression([left, operator, right]: [
|
||||
type: 'BinaryExpression',
|
||||
start: 0,
|
||||
end: 0,
|
||||
digest: null,
|
||||
|
||||
operator,
|
||||
left,
|
||||
right,
|
||||
@ -1139,5 +1134,5 @@ export async function deleteFromSelection(
|
||||
}
|
||||
|
||||
const nonCodeMetaEmpty = () => {
|
||||
return { nonCodeNodes: {}, start: [], digest: null }
|
||||
return { nonCodeNodes: {}, start: [] }
|
||||
}
|
||||
|
@ -41,7 +41,7 @@ beforeAll(async () => {
},
})
})
}, 20_000)
}, 30_000)

afterAll(() => {
engineCommandManager.tearDown()
@ -50,6 +50,11 @@ export enum ExportIntent {
|
||||
Make = 'make',
|
||||
}
|
||||
|
||||
export interface ExportInfo {
|
||||
intent: ExportIntent
|
||||
name: string
|
||||
}
|
||||
|
||||
type ClientMetrics = Models['ClientMetrics_type']
|
||||
|
||||
interface WebRTCClientMetrics extends ClientMetrics {
|
||||
@ -1354,7 +1359,7 @@ export class EngineCommandManager extends EventTarget {
|
||||
* export in progress. Otherwise it is an enum value of the intent.
|
||||
* Another export cannot be started if one is already in progress.
|
||||
*/
|
||||
private _exportIntent: ExportIntent | null = null
|
||||
private _exportInfo: ExportInfo | null = null
|
||||
_commandLogCallBack: (command: CommandLog[]) => void = () => {}
|
||||
|
||||
subscriptions: {
|
||||
@ -1410,12 +1415,12 @@ export class EngineCommandManager extends EventTarget {
|
||||
(() => {}) as any
|
||||
kclManager: null | KclManager = null
|
||||
|
||||
set exportIntent(intent: ExportIntent | null) {
|
||||
this._exportIntent = intent
|
||||
set exportInfo(info: ExportInfo | null) {
|
||||
this._exportInfo = info
|
||||
}
|
||||
|
||||
get exportIntent() {
|
||||
return this._exportIntent
|
||||
get exportInfo() {
|
||||
return this._exportInfo
|
||||
}
|
||||
|
||||
start({
|
||||
@ -1607,7 +1612,7 @@ export class EngineCommandManager extends EventTarget {
|
||||
// because in all other cases we send JSON strings. But in the case of
|
||||
// export we send a binary blob.
|
||||
// Pass this to our export function.
|
||||
if (this.exportIntent === null || this.pendingExport === undefined) {
|
||||
if (this.exportInfo === null || this.pendingExport === undefined) {
|
||||
toast.error(
|
||||
'Export intent was not set, but export data was received'
|
||||
)
|
||||
@ -1617,7 +1622,7 @@ export class EngineCommandManager extends EventTarget {
|
||||
return
|
||||
}
|
||||
|
||||
switch (this.exportIntent) {
|
||||
switch (this.exportInfo.intent) {
|
||||
case ExportIntent.Save: {
|
||||
exportSave(event.data, this.pendingExport.toastId).then(() => {
|
||||
this.pendingExport?.resolve(null)
|
||||
@ -1625,21 +1630,22 @@ export class EngineCommandManager extends EventTarget {
|
||||
break
|
||||
}
|
||||
case ExportIntent.Make: {
|
||||
exportMake(event.data, this.pendingExport.toastId).then(
|
||||
(result) => {
|
||||
if (result) {
|
||||
this.pendingExport?.resolve(null)
|
||||
} else {
|
||||
this.pendingExport?.reject('Failed to make export')
|
||||
}
|
||||
},
|
||||
this.pendingExport?.reject
|
||||
)
|
||||
exportMake(
|
||||
event.data,
|
||||
this.exportInfo.name,
|
||||
this.pendingExport.toastId
|
||||
).then((result) => {
|
||||
if (result) {
|
||||
this.pendingExport?.resolve(null)
|
||||
} else {
|
||||
this.pendingExport?.reject('Failed to make export')
|
||||
}
|
||||
}, this.pendingExport?.reject)
|
||||
break
|
||||
}
|
||||
}
|
||||
// Set the export intent back to null.
|
||||
this.exportIntent = null
|
||||
this.exportInfo = null
|
||||
return
|
||||
}
|
||||
|
||||
@ -1953,15 +1959,15 @@ export class EngineCommandManager extends EventTarget {
|
||||
return Promise.resolve(null)
|
||||
} else if (cmd.type === 'export') {
|
||||
const promise = new Promise<null>((resolve, reject) => {
|
||||
if (this.exportIntent === null) {
|
||||
if (this.exportIntent === null) {
|
||||
if (this.exportInfo === null) {
|
||||
if (this.exportInfo === null) {
|
||||
toast.error('Export intent was not set, but export is being sent')
|
||||
console.error('Export intent was not set, but export is being sent')
|
||||
return
|
||||
}
|
||||
}
|
||||
const toastId = toast.loading(
|
||||
this.exportIntent === ExportIntent.Save
|
||||
this.exportInfo.intent === ExportIntent.Save
|
||||
? EXPORT_TOAST_MESSAGES.START
|
||||
: MAKE_TOAST_MESSAGES.START
|
||||
)
|
||||
@ -1975,7 +1981,7 @@ export class EngineCommandManager extends EventTarget {
|
||||
resolve(passThrough)
|
||||
},
|
||||
reject: (reason: string) => {
|
||||
this.exportIntent = null
|
||||
this.exportInfo = null
|
||||
reject(reason)
|
||||
},
|
||||
commandId: command.cmd_id,
|
||||
|
@ -18,7 +18,7 @@ class FileSystemManager {
return Promise.resolve(window.electron.path.join(dir, path))
}

async readFile(path: string): Promise<Uint8Array | void> {
async readFile(path: string): Promise<Uint8Array> {
// Using local file system only works from desktop.
if (!isDesktop()) {
return Promise.reject(
@ -1823,11 +1823,10 @@ export const updateStartProfileAtArgs: SketchLineHelper['updateArgs'] = ({
|
||||
start: 0,
|
||||
end: 0,
|
||||
body: [],
|
||||
digest: null,
|
||||
|
||||
nonCodeMeta: {
|
||||
start: [],
|
||||
nonCodeNodes: [],
|
||||
digest: null,
|
||||
},
|
||||
},
|
||||
pathToNode,
|
||||
|
@ -110,6 +110,7 @@ const initialise = async () => {
const fullUrl = wasmUrl()
const input = await fetch(fullUrl)
const buffer = await input.arrayBuffer()

return await init(buffer)
} catch (e) {
console.log('Error initialising WASM', e)

3
src/lib/codeEditor.ts
Normal file
@ -0,0 +1,3 @@
export const normalizeLineEndings = (str: string, normalized = '\n') => {
return str.replace(/\r?\n/g, normalized)
}
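The new helper rewrites every `\r\n` (or bare `\n`) to the supplied terminator, LF by default, so code read from a Windows checkout matches what the editor buffer holds. For example (values are illustrative):

```typescript
import { normalizeLineEndings } from 'lib/codeEditor'

normalizeLineEndings('a\r\nb\r\nc') // => 'a\nb\nc' (CRLF collapsed to LF)
normalizeLineEndings('a\nb', '\r\n') // => 'a\r\nb' (custom terminator)
```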
@ -190,10 +190,31 @@ export const modelingMachineCommandConfig: StateMachineCommandSetConfig<
|
||||
options: () => {
|
||||
return Object.entries(machineManager.machines).map(
|
||||
([_, machine]) => ({
|
||||
name: `${machine.id} (${
|
||||
machine.make_model.model || machine.make_model.manufacturer
|
||||
}) via ${machineManager.machineApiIp || 'the local network'}`,
|
||||
name:
|
||||
`${machine.id} (${
|
||||
machine.make_model.model || machine.make_model.manufacturer
|
||||
}) (${machine.state.state})` +
|
||||
(machine.hardware_configuration &&
|
||||
machine.hardware_configuration.type !== 'none' &&
|
||||
machine.hardware_configuration.config.nozzle_diameter
|
||||
? ` - Nozzle Diameter: ${machine.hardware_configuration.config.nozzle_diameter}`
|
||||
: '') +
|
||||
(machine.hardware_configuration &&
|
||||
machine.hardware_configuration.type !== 'none' &&
|
||||
machine.hardware_configuration.config.filaments &&
|
||||
machine.hardware_configuration.config.filaments[0]
|
||||
? ` - ${
|
||||
machine.hardware_configuration.config.filaments[0].name
|
||||
} #${
|
||||
machine.hardware_configuration.config &&
|
||||
machine.hardware_configuration.config.filaments[0].color?.slice(
|
||||
0,
|
||||
6
|
||||
)
|
||||
}`
|
||||
: ''),
|
||||
isCurrent: false,
|
||||
disabled: machine.state.state !== 'idle',
|
||||
value: machine as components['schemas']['MachineInfoResponse'],
|
||||
})
|
||||
)
|
||||
|
@ -258,5 +258,6 @@ export type CommandArgumentWithName<
|
||||
export type CommandArgumentOption<A> = {
|
||||
name: string
|
||||
isCurrent?: boolean
|
||||
disabled?: boolean
|
||||
value: A
|
||||
}
|
||||
|
@ -92,6 +92,7 @@ export const MAKE_TOAST_MESSAGES = {
NO_MACHINE_API_IP: 'No machine api ip available',
NO_CURRENT_MACHINE: 'No current machine available',
NO_MACHINE_ID: 'No machine id available',
NO_NAME: 'No name provided',
ERROR_STARTING_PRINT: 'Error while starting print',
SUCCESS: 'Started print successfully',
}
@ -448,7 +448,9 @@ export const readProjectSettingsFile = async (
|
||||
}
|
||||
}
|
||||
|
||||
const configToml = await window.electron.readFile(settingsPath)
|
||||
const configToml = await window.electron.readFile(settingsPath, {
|
||||
encoding: 'utf-8',
|
||||
})
|
||||
const configObj = parseProjectSettings(configToml)
|
||||
if (err(configObj)) {
|
||||
return Promise.reject(configObj)
|
||||
@ -467,7 +469,9 @@ export const readAppSettingsFile = async () => {
|
||||
|
||||
// The file exists, read it and parse it.
|
||||
if (window.electron.exists(settingsPath)) {
|
||||
const configToml = await window.electron.readFile(settingsPath)
|
||||
const configToml = await window.electron.readFile(settingsPath, {
|
||||
encoding: 'utf-8',
|
||||
})
|
||||
const parsedAppConfig = parseAppSettings(configToml)
|
||||
if (err(parsedAppConfig)) {
|
||||
return Promise.reject(parsedAppConfig)
|
||||
@ -527,7 +531,9 @@ export const readTokenFile = async () => {
|
||||
let settingsPath = await getTokenFilePath()
|
||||
|
||||
if (window.electron.exists(settingsPath)) {
|
||||
const token: string = await window.electron.readFile(settingsPath)
|
||||
const token: string = await window.electron.readFile(settingsPath, {
|
||||
encoding: 'utf-8',
|
||||
})
|
||||
if (!token) return ''
|
||||
|
||||
return token
|
||||
|
31
src/lib/engineUtils.ts
Normal file
@ -0,0 +1,31 @@
import EngineUtils from '@engine-utils'

type KCEngineUtilsEvaluatePath = {
(sketch: string, t: number): string
}
let kcEngineUtilsEvaluatePath: KCEngineUtilsEvaluatePath

export async function init() {
return await new Promise((resolve, reject) => {
try {
EngineUtils().then((module) => {
kcEngineUtilsEvaluatePath = module.cwrap(
'kcEngineUtilsEvaluatePath',
'string',
['string', 'number']
)
resolve(true)
})
} catch (e) {
reject(e)
}
})
}

export async function getTruePathEndPos(sketch: string) {
if (!kcEngineUtilsEvaluatePath) {
await init()
}

return kcEngineUtilsEvaluatePath(sketch, 1.0)
}
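getTruePathEndPos lazily initialises the engine-utils WASM module on first use and then evaluates the given sketch path at t = 1.0, i.e. its end point, returning whatever string the kcEngineUtilsEvaluatePath export produces. A hedged usage sketch; the shape of the sketch argument is an assumption here, not something this diff defines:

```typescript
import { getTruePathEndPos } from 'lib/engineUtils'

// Illustrative only: `sketchJson` stands in for whatever serialized sketch
// the engine-utils library actually expects.
async function logPathEnd(sketchJson: string) {
  const end = await getTruePathEndPos(sketchJson)
  console.log('true path end position:', end)
}
```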
@ -8,8 +8,15 @@ import { MAKE_TOAST_MESSAGES } from './constants'
// Make files locally from an export call.
export async function exportMake(
data: ArrayBuffer,
name: string,
toastId: string
): Promise<Response | null> {
if (name === '') {
console.error(MAKE_TOAST_MESSAGES.NO_NAME)
toast.error(MAKE_TOAST_MESSAGES.NO_NAME, { id: toastId })
return null
}

if (machineManager.machineCount() === 0) {
console.error(MAKE_TOAST_MESSAGES.NO_MACHINES)
toast.error(MAKE_TOAST_MESSAGES.NO_MACHINES, { id: toastId })
@ -39,7 +46,7 @@ export async function exportMake(

const params: components['schemas']['PrintParameters'] = {
machine_id: machineId,
job_name: 'Exported Job', // TODO: make this the project name.
job_name: name,
}
try {
console.log('params', params)
177
src/lib/machine-api.d.ts
vendored
@ -119,18 +119,96 @@ export interface components {
|
||||
/** @description Extra machine-specific information regarding a connected machine. */
|
||||
ExtraMachineInfoResponse:
|
||||
| {
|
||||
Moonraker: Record<string, never>
|
||||
/** @enum {string} */
|
||||
type: 'moonraker'
|
||||
}
|
||||
| {
|
||||
Usb: Record<string, never>
|
||||
/** @enum {string} */
|
||||
type: 'usb'
|
||||
}
|
||||
| {
|
||||
Bambu: Record<string, never>
|
||||
/** @description The current stage of the machine as defined by Bambu which can include errors, etc. */
|
||||
current_stage?: components['schemas']['Stage'] | null
|
||||
/** @description The nozzle diameter of the machine. */
|
||||
nozzle_diameter: components['schemas']['NozzleDiameter']
|
||||
/** @enum {string} */
|
||||
type: 'bambu'
|
||||
}
|
||||
/** @description Configuration for a FDM-based printer. */
|
||||
FdmHardwareConfiguration: {
|
||||
/** @description The filaments the printer has access to. */
|
||||
filaments: components['schemas']['Filament'][]
|
||||
/**
|
||||
* Format: double
|
||||
* @description Diameter of the extrusion nozzle, in mm.
|
||||
*/
|
||||
nozzle_diameter: number
|
||||
}
|
||||
/** @description Information about the filament being used in a FDM printer. */
|
||||
Filament: {
|
||||
/** @description The color (as hex without the `#`) of the filament, this is likely specific to the manufacturer. */
|
||||
color?: string | null
|
||||
/** @description The material that the filament is made of. */
|
||||
material: components['schemas']['FilamentMaterial']
|
||||
/** @description The name of the filament, this is likely specfic to the manufacturer. */
|
||||
name?: string | null
|
||||
}
|
||||
/** @description The material that the filament is made of. */
|
||||
FilamentMaterial:
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'pla'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'pla_support'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'abs'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'petg'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'nylon'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'tpu'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'pva'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'hips'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'composite'
|
||||
}
|
||||
/** @description The hardware configuration of a machine. */
|
||||
HardwareConfiguration:
|
||||
| {
|
||||
/** @enum {string} */
|
||||
type: 'none'
|
||||
}
|
||||
| {
|
||||
/** @description The configuration for the FDM printer. */
|
||||
config: components['schemas']['FdmHardwareConfiguration']
|
||||
/** @enum {string} */
|
||||
type: 'fdm'
|
||||
}
|
||||
/** @description Information regarding a connected machine. */
|
||||
MachineInfoResponse: {
|
||||
/** @description Additional, per-machine information which is specific to the underlying machine type. */
|
||||
extra?: components['schemas']['ExtraMachineInfoResponse'] | null
|
||||
/** @description Information about how the Machine is currently configured. */
|
||||
hardware_configuration: components['schemas']['HardwareConfiguration']
|
||||
/** @description Machine Identifier (ID) for the specific Machine. */
|
||||
id: string
|
||||
/** @description Information regarding the method of manufacture. */
|
||||
@ -143,6 +221,11 @@ export interface components {
|
||||
*
|
||||
* What "close" means is up to you! */
|
||||
max_part_volume?: components['schemas']['Volume'] | null
|
||||
/**
|
||||
* Format: double
|
||||
* @description Progress of the current print, if printing.
|
||||
*/
|
||||
progress?: number | null
|
||||
/** @description Status of the printer -- be it printing, idle, or unreachable. This may dictate if a machine is capable of taking a new job. */
|
||||
state: components['schemas']['MachineState']
|
||||
}
|
||||
@ -157,17 +240,40 @@ export interface components {
|
||||
}
|
||||
/** @description Current state of the machine -- be it printing, idle or offline. This can be used to determine if a printer is in the correct state to take a new job. */
|
||||
MachineState:
|
||||
| 'Unknown'
|
||||
| 'Idle'
|
||||
| 'Running'
|
||||
| 'Offline'
|
||||
| 'Paused'
|
||||
| 'Complete'
|
||||
| {
|
||||
Failed: string | null
|
||||
/** @enum {string} */
|
||||
state: 'unknown'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
state: 'idle'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
state: 'running'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
state: 'offline'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
state: 'paused'
|
||||
}
|
||||
| {
|
||||
/** @enum {string} */
|
||||
state: 'complete'
|
||||
}
|
||||
| {
|
||||
/** @description A human-readable message describing the failure. */
|
||||
message?: string | null
|
||||
/** @enum {string} */
|
||||
state: 'failed'
|
||||
}
|
||||
/** @description Specific technique by which this Machine takes a design, and produces a real-world 3D object. */
|
||||
MachineType: 'Stereolithography' | 'FusedDeposition' | 'Cnc'
|
||||
MachineType: 'stereolithography' | 'fused_deposition' | 'cnc'
|
||||
/** @description A nozzle diameter. */
|
||||
NozzleDiameter: '0.2' | '0.4' | '0.6' | '0.8'
|
||||
/** @description The response from the `/ping` endpoint. */
|
||||
Pong: {
|
||||
/** @description The pong response. */
|
||||
@ -186,7 +292,56 @@ export interface components {
|
||||
job_name: string
|
||||
/** @description The machine id to print to. */
|
||||
machine_id: string
|
||||
/** @description Requested design-specific slicer configurations. */
|
||||
slicer_configuration?: components['schemas']['SlicerConfiguration'] | null
|
||||
}
|
||||
/** @description The slicer configuration is a set of parameters that are passed to the slicer to control how the gcode is generated. */
|
||||
SlicerConfiguration: {
|
||||
/**
|
||||
* Format: uint
|
||||
* @description The filament to use for the print.
|
||||
*/
|
||||
filament_idx?: number | null
|
||||
}
|
||||
/** @description The print stage. These come from: https://github.com/SoftFever/OrcaSlicer/blob/431978baf17961df90f0d01871b0ad1d839d7f5d/src/slic3r/GUI/DeviceManager.cpp#L78 */
|
||||
Stage:
|
||||
| 'nothing'
|
||||
| 'empty'
|
||||
| 'auto_bed_leveling'
|
||||
| 'heatbed_preheating'
|
||||
| 'sweeping_xy_mech_mode'
|
||||
| 'changing_filament'
|
||||
| 'm400_pause'
|
||||
| 'paused_due_to_filament_runout'
|
||||
| 'heating_hotend'
|
||||
| 'calibrating_extrusion'
|
||||
| 'scanning_bed_surface'
|
||||
| 'inspecting_first_layer'
|
||||
| 'identifying_build_plate_type'
|
||||
| 'calibrating_micro_lidar'
|
||||
| 'homing_toolhead'
|
||||
| 'cleaning_nozzle_tip'
|
||||
| 'checking_extruder_temperature'
|
||||
| 'printing_was_paused_by_the_user'
|
||||
| 'pause_of_front_cover_falling'
|
||||
| 'calibrating_micro_lidar2'
|
||||
| 'calibrating_extrusion_flow'
|
||||
| 'paused_due_to_nozzle_temperature_malfunction'
|
||||
| 'paused_due_to_heat_bed_temperature_malfunction'
|
||||
| 'filament_unloading'
|
||||
| 'skip_step_pause'
|
||||
| 'filament_loading'
|
||||
| 'motor_noise_calibration'
|
||||
| 'paused_due_to_ams_lost'
|
||||
| 'paused_due_to_low_speed_of_the_heat_break_fan'
|
||||
| 'paused_due_to_chamber_temperature_control_error'
|
||||
| 'cooling_chamber'
|
||||
| 'paused_by_the_gcode_inserted_by_the_user'
|
||||
| 'motor_noise_showoff'
|
||||
| 'nozzle_filament_covered_detected_pause'
|
||||
| 'cutter_error_pause'
|
||||
| 'first_layer_error_pause'
|
||||
| 'nozzle_clog_pause'
|
||||
/** @description Set of three values to represent the extent of a 3-D Volume. This contains the width, depth, and height values, generally used to represent some maximum or minimum.
|
||||
*
|
||||
* All measurements are in millimeters. */
|
||||
|
@ -85,7 +85,11 @@ export class MachineManager {
return
}

this._machines = await window.electron.listMachines()
if (this._machineApiIp === null) {
return
}

this._machines = await window.electron.listMachines(this._machineApiIp)
}

private async updateMachineApiIp(): Promise<void> {
@ -14,6 +14,7 @@ import { codeManager } from 'lib/singletons'
|
||||
import { fileSystemManager } from 'lang/std/fileSystemManager'
|
||||
import { getProjectInfo } from './desktop'
|
||||
import { createSettings } from './settings/initialSettings'
|
||||
import { normalizeLineEndings } from 'lib/codeEditor'
|
||||
|
||||
// The root loader simply resolves the settings and any errors that
|
||||
// occurred during the settings load
|
||||
@ -108,7 +109,9 @@ export const fileLoader: LoaderFunction = async (
|
||||
)
|
||||
}
|
||||
|
||||
code = await window.electron.readFile(currentFilePath)
|
||||
code = await window.electron.readFile(currentFilePath, {
|
||||
encoding: 'utf-8',
|
||||
})
|
||||
code = normalizeLineEndings(code)
|
||||
|
||||
// Update both the state and the editor's code.
|
||||
@ -182,7 +185,3 @@ export const homeLoader: LoaderFunction = async (): Promise<
|
||||
}
|
||||
return {}
|
||||
}
|
||||
|
||||
const normalizeLineEndings = (str: string, normalized = '\n') => {
|
||||
return str.replace(/\r?\n/g, normalized)
|
||||
}
|
||||
|
@ -37,8 +37,6 @@ if (!process.env.NODE_ENV)
|
||||
// dotenv override when present
|
||||
dotenv.config({ path: [`.env.${NODE_ENV}.local`, `.env.${NODE_ENV}`] })
|
||||
|
||||
console.log(process.env)
|
||||
|
||||
process.env.VITE_KC_API_WS_MODELING_URL ??=
|
||||
'wss://api.zoo.dev/ws/modeling/commands'
|
||||
process.env.VITE_KC_API_BASE_URL ??= 'https://api.zoo.dev'
|
||||
@ -238,6 +236,7 @@ ipcMain.handle('find_machine_api', () => {
|
||||
const ip = service.addresses[0]
|
||||
const port = service.port
|
||||
// We want to return the ip address of the machine API.
|
||||
console.log(`Machine API found at ${ip}:${port}`)
|
||||
resolve(`${ip}:${port}`)
|
||||
}
|
||||
)
|
||||
|
@ -30,22 +30,51 @@ const isMac = os.platform() === 'darwin'
|
||||
const isWindows = os.platform() === 'win32'
|
||||
const isLinux = os.platform() === 'linux'
|
||||
|
||||
let fsWatchListeners = new Map<string, ReturnType<typeof chokidar.watch>>()
|
||||
let fsWatchListeners = new Map<
|
||||
string,
|
||||
Map<
|
||||
string,
|
||||
{
|
||||
watcher: ReturnType<typeof chokidar.watch>
|
||||
callback: (eventType: string, path: string) => void
|
||||
}
|
||||
>
|
||||
>()
|
||||
|
||||
const watchFileOn = (path: string, callback: (path: string) => void) => {
|
||||
const watcherMaybe = fsWatchListeners.get(path)
|
||||
if (watcherMaybe) return
|
||||
const watcher = chokidar.watch(path)
|
||||
const watchFileOn = (
|
||||
path: string,
|
||||
key: string,
|
||||
callback: (eventType: string, path: string) => void
|
||||
) => {
|
||||
let watchers = fsWatchListeners.get(path)
|
||||
if (!watchers) {
|
||||
watchers = new Map()
|
||||
}
|
||||
const watcher = chokidar.watch(path, { depth: 1 })
|
||||
watcher.on('all', callback)
|
||||
fsWatchListeners.set(path, watcher)
|
||||
watchers.set(key, { watcher, callback })
|
||||
fsWatchListeners.set(path, watchers)
|
||||
}
|
||||
const watchFileOff = (path: string) => {
|
||||
const watcher = fsWatchListeners.get(path)
|
||||
if (!watcher) return
|
||||
watcher.unwatch(path)
|
||||
fsWatchListeners.delete(path)
|
||||
const watchFileOff = (path: string, key: string) => {
|
||||
const watchers = fsWatchListeners.get(path)
|
||||
if (!watchers) return
|
||||
const data = watchers.get(key)
|
||||
if (!data) {
|
||||
console.warn(
|
||||
"Trying to remove a watcher, callback that doesn't exist anymore. Suspicious."
|
||||
)
|
||||
return
|
||||
}
|
||||
const { watcher, callback } = data
|
||||
watcher.off('all', callback)
|
||||
watchers.delete(key)
|
||||
if (watchers.size === 0) {
|
||||
fsWatchListeners.delete(path)
|
||||
} else {
|
||||
fsWatchListeners.set(path, watchers)
|
||||
}
|
||||
}
|
||||
const readFile = (path: string) => fs.readFile(path, 'utf-8')
|
||||
const readFile = fs.readFile
|
||||
// It seems like from the node source code this does not actually block but also
|
||||
// don't trust me on that (jess).
|
||||
const exists = (path: string) => fsSync.existsSync(path)
|
||||
@ -77,11 +106,12 @@ const kittycad = (access: string, args: any) =>
|
||||
|
||||
// We could probably do this from the renderer side, but I fear CORS will
|
||||
// bite our butts.
|
||||
const listMachines = async (): Promise<MachinesListing> => {
|
||||
const machineApi = await ipcRenderer.invoke('find_machine_api')
|
||||
if (!machineApi) return []
|
||||
|
||||
return fetch(`http://${machineApi}/machines`).then((resp) => resp.json())
|
||||
const listMachines = async (
|
||||
machineApiAddr: string
|
||||
): Promise<MachinesListing> => {
|
||||
return fetch(`http://${machineApiAddr}/machines`).then((resp) => {
|
||||
return resp.json()
|
||||
})
|
||||
}
|
||||
|
||||
const getMachineApiIp = async (): Promise<String | null> =>
|
||||
|
14
src/wasm-lib/Cargo.lock
generated
@ -121,9 +121,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.89"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
|
||||
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
|
||||
dependencies = [
|
||||
"backtrace",
|
||||
]
|
||||
@ -1617,7 +1617,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-test-server"
|
||||
version = "0.1.13"
|
||||
version = "0.1.14"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"hyper 0.14.30",
|
||||
@ -1684,9 +1684,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kittycad-modeling-cmds"
|
||||
version = "0.2.68"
|
||||
version = "0.2.70"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7e3aedfcc1d8ea9995ec3eb78a6743c585c9380475c48701797f107489b696aa"
|
||||
checksum = "b135696d07a4fab928e5abace4dd05f4976eafab5d73e5747a85dc5a684b936c"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
@ -3005,9 +3005,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.128"
|
||||
version = "1.0.132"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
|
||||
checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
|
||||
dependencies = [
|
||||
"indexmap 2.6.0",
|
||||
"itoa",
|
||||
|
@ -72,7 +72,7 @@ members = [
|
||||
[workspace.dependencies]
|
||||
http = "1"
|
||||
kittycad = { version = "0.3.23", default-features = false, features = ["js", "requests"] }
|
||||
kittycad-modeling-cmds = { version = "0.2.68", features = ["websocket"] }
|
||||
kittycad-modeling-cmds = { version = "0.2.70", features = ["websocket"] }
|
||||
|
||||
[[test]]
|
||||
name = "executor"
|
||||
|
@ -12,7 +12,7 @@ fn basic() {
|
||||
let expected = Program {
|
||||
start: 0,
|
||||
end: 11,
|
||||
body: vec![BodyItem::VariableDeclaration(VariableDeclaration {
|
||||
body: vec![BodyItem::VariableDeclaration(Box::new(VariableDeclaration {
|
||||
start: 0,
|
||||
end: 11,
|
||||
declarations: vec![VariableDeclarator {
|
||||
@ -36,7 +36,7 @@ fn basic() {
|
||||
visibility: ItemVisibility::Default,
|
||||
kind: VariableKind::Const,
|
||||
digest: None,
|
||||
})],
|
||||
}))],
|
||||
non_code_meta: NonCodeMeta::default(),
|
||||
digest: None,
|
||||
};
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "kcl-test-server"
|
||||
description = "A test server for KCL"
|
||||
version = "0.1.13"
|
||||
version = "0.1.14"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
|
||||
|
@ -68,7 +68,7 @@ tokio-tungstenite = { version = "0.24.0", features = ["rustls-tls-native-roots"]
tower-lsp = { version = "0.20.0", features = ["proposed"] }

[features]
default = ["engine"]
default = ["engine"] # add wasm-engine-utils here when we're ready
cli = ["dep:clap"]
# For the lsp server, when run with stdout for rpc we want to disable println.
# This is used for editor extensions that use the lsp server.
@ -77,6 +77,10 @@ engine = []
pyo3 = ["dep:pyo3"]
# Helper functions also used in benchmarks.
lsp-test-util = []
#if enabled, kcl will link directly against a wasm build of the engine utils lib to save latency
wasm-engine-utils = []
#if enabled, kcl will link directly against a native build of the engine utils lib to save latency (not yet functional)
native-engine-utils = []

tabled = ["dep:tabled"]
@ -13,7 +13,6 @@ use parse_display::{Display, FromStr};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value as JValue;
|
||||
use sha2::{Digest as DigestTrait, Sha256};
|
||||
use tower_lsp::lsp_types::{
|
||||
CompletionItem, CompletionItemKind, DocumentSymbol, FoldingRange, FoldingRangeKind, Range as LspRange, SymbolKind,
|
||||
};
|
||||
@ -33,12 +32,12 @@ use crate::{
|
||||
};
|
||||
|
||||
mod condition;
|
||||
pub(crate) mod digest;
|
||||
pub(crate) mod execute;
|
||||
mod literal_value;
|
||||
mod none;
|
||||
|
||||
/// Position-independent digest of the AST node.
|
||||
pub type Digest = [u8; 32];
|
||||
use digest::Digest;
|
||||
|
||||
pub enum Definition<'a> {
|
||||
Variable(&'a VariableDeclarator),
|
||||
@ -54,45 +53,15 @@ pub struct Program {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
pub body: Vec<BodyItem>,
|
||||
#[serde(default, skip_serializing_if = "NonCodeMeta::is_empty")]
|
||||
pub non_code_meta: NonCodeMeta,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
macro_rules! compute_digest {
|
||||
(|$slf:ident, $hasher:ident| $body:block) => {
|
||||
/// Compute a digest over the AST node.
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
if let Some(node_digest) = self.digest {
|
||||
return node_digest;
|
||||
}
|
||||
|
||||
let mut $hasher = Sha256::new();
|
||||
|
||||
#[allow(unused_mut)]
|
||||
let mut $slf = self;
|
||||
|
||||
$hasher.update(std::any::type_name::<Self>());
|
||||
|
||||
$body
|
||||
|
||||
let node_digest: Digest = $hasher.finalize().into();
|
||||
$slf.digest = Some(node_digest);
|
||||
node_digest
|
||||
}
|
||||
};
|
||||
}
|
||||
pub(crate) use compute_digest;
|
||||
|
||||
impl Program {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.body.len().to_ne_bytes());
|
||||
for body_item in slf.body.iter_mut() {
|
||||
hasher.update(body_item.compute_digest());
|
||||
}
|
||||
hasher.update(slf.non_code_meta.compute_digest());
|
||||
});
|
||||
|
||||
/// Is the last body item an expression?
|
||||
pub fn ends_with_expr(&self) -> bool {
|
||||
let Some(ref last) = self.body.last() else {
|
||||
@ -485,20 +454,11 @@ pub(crate) use impl_value_meta;
|
||||
pub enum BodyItem {
|
||||
ImportStatement(Box<ImportStatement>),
|
||||
ExpressionStatement(ExpressionStatement),
|
||||
VariableDeclaration(VariableDeclaration),
|
||||
VariableDeclaration(Box<VariableDeclaration>),
|
||||
ReturnStatement(ReturnStatement),
|
||||
}
|
||||
|
||||
impl BodyItem {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
BodyItem::ImportStatement(s) => s.compute_digest(),
|
||||
BodyItem::ExpressionStatement(es) => es.compute_digest(),
|
||||
BodyItem::VariableDeclaration(vs) => vs.compute_digest(),
|
||||
BodyItem::ReturnStatement(rs) => rs.compute_digest(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start(&self) -> usize {
|
||||
match self {
|
||||
BodyItem::ImportStatement(stmt) => stmt.start(),
|
||||
@ -554,30 +514,6 @@ pub enum Expr {
|
||||
}
|
||||
|
||||
impl Expr {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
Expr::Literal(lit) => lit.compute_digest(),
|
||||
Expr::Identifier(id) => id.compute_digest(),
|
||||
Expr::TagDeclarator(tag) => tag.compute_digest(),
|
||||
Expr::BinaryExpression(be) => be.compute_digest(),
|
||||
Expr::FunctionExpression(fe) => fe.compute_digest(),
|
||||
Expr::CallExpression(ce) => ce.compute_digest(),
|
||||
Expr::PipeExpression(pe) => pe.compute_digest(),
|
||||
Expr::PipeSubstitution(ps) => ps.compute_digest(),
|
||||
Expr::ArrayExpression(ae) => ae.compute_digest(),
|
||||
Expr::ArrayRangeExpression(are) => are.compute_digest(),
|
||||
Expr::ObjectExpression(oe) => oe.compute_digest(),
|
||||
Expr::MemberExpression(me) => me.compute_digest(),
|
||||
Expr::UnaryExpression(ue) => ue.compute_digest(),
|
||||
Expr::IfExpression(e) => e.compute_digest(),
|
||||
Expr::None(_) => {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"Value::None");
|
||||
hasher.finalize().into()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_lsp_folding_range(&self) -> Option<FoldingRange> {
|
||||
let recasted = self.recast(&FormatOptions::default(), 0, false);
|
||||
// If the code only has one line then we don't need to fold it.
|
||||
@ -801,18 +737,6 @@ impl From<&BinaryPart> for SourceRange {
|
||||
}
|
||||
|
||||
impl BinaryPart {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
BinaryPart::Literal(lit) => lit.compute_digest(),
|
||||
BinaryPart::Identifier(id) => id.compute_digest(),
|
||||
BinaryPart::BinaryExpression(be) => be.compute_digest(),
|
||||
BinaryPart::CallExpression(ce) => ce.compute_digest(),
|
||||
BinaryPart::UnaryExpression(ue) => ue.compute_digest(),
|
||||
BinaryPart::MemberExpression(me) => me.compute_digest(),
|
||||
BinaryPart::IfExpression(e) => e.compute_digest(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the constraint level.
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
match self {
|
||||
@ -916,6 +840,8 @@ pub struct NonCodeNode {
|
||||
pub end: usize,
|
||||
pub value: NonCodeValue,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -932,29 +858,6 @@ impl From<&NonCodeNode> for SourceRange {
|
||||
}
|
||||
|
||||
impl NonCodeNode {
|
||||
compute_digest!(|slf, hasher| {
|
||||
match &slf.value {
|
||||
NonCodeValue::Shebang { value } => {
|
||||
hasher.update(value);
|
||||
}
|
||||
NonCodeValue::InlineComment { value, style } => {
|
||||
hasher.update(value);
|
||||
hasher.update(style.digestable_id());
|
||||
}
|
||||
NonCodeValue::BlockComment { value, style } => {
|
||||
hasher.update(value);
|
||||
hasher.update(style.digestable_id());
|
||||
}
|
||||
NonCodeValue::NewLineBlockComment { value, style } => {
|
||||
hasher.update(value);
|
||||
hasher.update(style.digestable_id());
|
||||
}
|
||||
NonCodeValue::NewLine => {
|
||||
hasher.update(b"\r\n");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
pub fn contains(&self, pos: usize) -> bool {
|
||||
self.start <= pos && pos <= self.end
|
||||
}
|
||||
@ -1083,6 +986,8 @@ pub struct NonCodeMeta {
|
||||
pub non_code_nodes: HashMap<usize, Vec<NonCodeNode>>,
|
||||
pub start: Vec<NonCodeNode>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1127,20 +1032,6 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
|
||||
}
|
||||
|
||||
impl NonCodeMeta {
|
||||
compute_digest!(|slf, hasher| {
|
||||
let mut keys = slf.non_code_nodes.keys().copied().collect::<Vec<_>>();
|
||||
keys.sort();
|
||||
|
||||
for key in keys.into_iter() {
|
||||
hasher.update(key.to_ne_bytes());
|
||||
let nodes = slf.non_code_nodes.get_mut(&key).unwrap();
|
||||
hasher.update(nodes.len().to_ne_bytes());
|
||||
for node in nodes.iter_mut() {
|
||||
hasher.update(node.compute_digest());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
pub fn insert(&mut self, i: usize, new: NonCodeNode) {
|
||||
self.non_code_nodes.entry(i).or_default().push(new);
|
||||
}
|
||||
@ -1169,24 +1060,14 @@ pub struct ImportItem {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(ImportItem);
|
||||
|
||||
impl ImportItem {
|
||||
compute_digest!(|slf, hasher| {
|
||||
let name = slf.name.name.as_bytes();
|
||||
hasher.update(name.len().to_ne_bytes());
|
||||
hasher.update(name);
|
||||
if let Some(alias) = &mut slf.alias {
|
||||
hasher.update([1]);
|
||||
hasher.update(alias.compute_digest());
|
||||
} else {
|
||||
hasher.update([0]);
|
||||
}
|
||||
});
|
||||
|
||||
pub fn identifier(&self) -> &str {
|
||||
match &self.alias {
|
||||
Some(alias) => &alias.name,
|
||||
@ -1233,21 +1114,14 @@ pub struct ImportStatement {
|
||||
pub path: String,
|
||||
pub raw_path: String,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(ImportStatement);
|
||||
|
||||
impl ImportStatement {
|
||||
compute_digest!(|slf, hasher| {
|
||||
for item in &mut slf.items {
|
||||
hasher.update(item.compute_digest());
|
||||
}
|
||||
let path = slf.path.as_bytes();
|
||||
hasher.update(path.len().to_ne_bytes());
|
||||
hasher.update(path);
|
||||
});
|
||||
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
ConstraintLevel::Full {
|
||||
source_ranges: vec![self.into()],
|
||||
@ -1283,17 +1157,13 @@ pub struct ExpressionStatement {
|
||||
pub end: usize,
|
||||
pub expression: Expr,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(ExpressionStatement);
|
||||
|
||||
impl ExpressionStatement {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.expression.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
|
||||
#[databake(path = kcl_lib::ast::types)]
|
||||
#[ts(export)]
|
||||
@ -1305,6 +1175,8 @@ pub struct CallExpression {
|
||||
pub arguments: Vec<Expr>,
|
||||
pub optional: bool,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1328,15 +1200,6 @@ impl CallExpression {
|
||||
})
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.callee.compute_digest());
|
||||
hasher.update(slf.arguments.len().to_ne_bytes());
|
||||
for argument in slf.arguments.iter_mut() {
|
||||
hasher.update(argument.compute_digest());
|
||||
}
|
||||
hasher.update(if slf.optional { [1] } else { [0] });
|
||||
});
|
||||
|
||||
/// Is at least one argument the '%' i.e. the substitution operator?
|
||||
pub fn has_substitution_arg(&self) -> bool {
|
||||
self.arguments
|
||||
@ -1473,6 +1336,8 @@ pub struct VariableDeclaration {
|
||||
pub visibility: ItemVisibility,
|
||||
pub kind: VariableKind, // Change to enum if there are specific values
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1511,15 +1376,6 @@ impl From<&VariableDeclaration> for Vec<CompletionItem> {
|
||||
impl_value_meta!(VariableDeclaration);
|
||||
|
||||
impl VariableDeclaration {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.declarations.len().to_ne_bytes());
|
||||
for declarator in &mut slf.declarations {
|
||||
hasher.update(declarator.compute_digest());
|
||||
}
|
||||
hasher.update(slf.visibility.digestable_id());
|
||||
hasher.update(slf.kind.digestable_id());
|
||||
});
|
||||
|
||||
pub fn new(declarations: Vec<VariableDeclarator>, visibility: ItemVisibility, kind: VariableKind) -> Self {
|
||||
Self {
|
||||
start: 0,
|
||||
@ -1736,6 +1592,8 @@ pub struct VariableDeclarator {
|
||||
/// The value of the variable.
|
||||
pub init: Expr,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1752,11 +1610,6 @@ impl VariableDeclarator {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.id.compute_digest());
|
||||
hasher.update(slf.init.compute_digest());
|
||||
});
|
||||
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
self.init.get_constraint_level()
|
||||
}
|
||||
@ -1772,6 +1625,8 @@ pub struct Literal {
|
||||
pub value: LiteralValue,
|
||||
pub raw: String,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1788,10 +1643,6 @@ impl Literal {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.value.digestable_id());
|
||||
});
|
||||
|
||||
/// Get the constraint level for this literal.
|
||||
/// Literals are always not constrained.
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
@ -1832,6 +1683,8 @@ pub struct Identifier {
|
||||
pub end: usize,
|
||||
pub name: String,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1847,12 +1700,6 @@ impl Identifier {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
let name = slf.name.as_bytes();
|
||||
hasher.update(name.len().to_ne_bytes());
|
||||
hasher.update(name);
|
||||
});
|
||||
|
||||
/// Get the constraint level for this identifier.
|
||||
/// Identifier are always fully constrained.
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
@ -1879,6 +1726,8 @@ pub struct TagDeclarator {
|
||||
#[serde(rename = "value")]
|
||||
pub name: String,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -1955,12 +1804,6 @@ impl TagDeclarator {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
let name = slf.name.as_bytes();
|
||||
hasher.update(name.len().to_ne_bytes());
|
||||
hasher.update(name);
|
||||
});
|
||||
|
||||
/// Get the constraint level for this identifier.
|
||||
/// TagDeclarator are always fully constrained.
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
@ -2003,6 +1846,8 @@ pub struct PipeSubstitution {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2016,10 +1861,6 @@ impl PipeSubstitution {
|
||||
digest: None,
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(b"PipeSubstitution");
|
||||
});
|
||||
}
|
||||
|
||||
impl Default for PipeSubstitution {
|
||||
@ -2045,6 +1886,8 @@ pub struct ArrayExpression {
|
||||
#[serde(default, skip_serializing_if = "NonCodeMeta::is_empty")]
|
||||
pub non_code_meta: NonCodeMeta,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2067,13 +1910,6 @@ impl ArrayExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.elements.len().to_ne_bytes());
|
||||
for value in slf.elements.iter_mut() {
|
||||
hasher.update(value.compute_digest());
|
||||
}
|
||||
});
|
||||
|
||||
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Expr) {
|
||||
for element in &mut self.elements {
|
||||
element.replace_value(source_range, new_value.clone());
|
||||
@ -2127,6 +1963,8 @@ pub struct ArrayRangeExpression {
|
||||
/// Is the `end_element` included in the range?
|
||||
pub end_inclusive: bool,
|
||||
// TODO (maybe) comments on range components?
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2150,11 +1988,6 @@ impl ArrayRangeExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.start_element.compute_digest());
|
||||
hasher.update(slf.end_element.compute_digest());
|
||||
});
|
||||
|
||||
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Expr) {
|
||||
self.start_element.replace_value(source_range, new_value.clone());
|
||||
self.end_element.replace_value(source_range, new_value.clone());
|
||||
@ -2198,6 +2031,8 @@ pub struct ObjectExpression {
|
||||
#[serde(default, skip_serializing_if = "NonCodeMeta::is_empty")]
|
||||
pub non_code_meta: NonCodeMeta,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2212,13 +2047,6 @@ impl ObjectExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.properties.len().to_ne_bytes());
|
||||
for prop in slf.properties.iter_mut() {
|
||||
hasher.update(prop.compute_digest());
|
||||
}
|
||||
});
|
||||
|
||||
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Expr) {
|
||||
for property in &mut self.properties {
|
||||
property.value.replace_value(source_range, new_value.clone());
|
||||
@ -2272,17 +2100,14 @@ pub struct ObjectProperty {
|
||||
pub key: Identifier,
|
||||
pub value: Expr,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(ObjectProperty);
|
||||
|
||||
impl ObjectProperty {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.key.compute_digest());
|
||||
hasher.update(slf.value.compute_digest());
|
||||
});
|
||||
|
||||
pub fn get_lsp_symbols(&self, code: &str) -> Vec<DocumentSymbol> {
|
||||
let source_range: SourceRange = self.clone().into();
|
||||
let inner_source_range: SourceRange = self.key.clone().into();
|
||||
@ -2322,13 +2147,6 @@ pub enum MemberObject {
|
||||
}
|
||||
|
||||
impl MemberObject {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
MemberObject::MemberExpression(me) => me.compute_digest(),
|
||||
MemberObject::Identifier(id) => id.compute_digest(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a hover value that includes the given character position.
|
||||
pub fn get_hover_value_for_position(&self, pos: usize, code: &str) -> Option<Hover> {
|
||||
match self {
|
||||
@ -2376,13 +2194,6 @@ pub enum LiteralIdentifier {
|
||||
}
|
||||
|
||||
impl LiteralIdentifier {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
LiteralIdentifier::Identifier(id) => id.compute_digest(),
|
||||
LiteralIdentifier::Literal(lit) => lit.compute_digest(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start(&self) -> usize {
|
||||
match self {
|
||||
LiteralIdentifier::Identifier(identifier) => identifier.start,
|
||||
@ -2421,18 +2232,14 @@ pub struct MemberExpression {
|
||||
pub property: LiteralIdentifier,
|
||||
pub computed: bool,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(MemberExpression);
|
||||
|
||||
impl MemberExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.object.compute_digest());
|
||||
hasher.update(slf.property.compute_digest());
|
||||
hasher.update(if slf.computed { [1] } else { [0] });
|
||||
});
|
||||
|
||||
/// Get the constraint level for a member expression.
|
||||
/// This is always fully constrained.
|
||||
pub fn get_constraint_level(&self) -> ConstraintLevel {
|
||||
@ -2486,6 +2293,8 @@ pub struct BinaryExpression {
|
||||
pub left: BinaryPart,
|
||||
pub right: BinaryPart,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2503,12 +2312,6 @@ impl BinaryExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.operator.digestable_id());
|
||||
hasher.update(slf.left.compute_digest());
|
||||
hasher.update(slf.right.compute_digest());
|
||||
});
|
||||
|
||||
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Expr) {
|
||||
self.left.replace_value(source_range, new_value.clone());
|
||||
self.right.replace_value(source_range, new_value);
|
||||
@ -2674,6 +2477,8 @@ pub struct UnaryExpression {
|
||||
pub operator: UnaryOperator,
|
||||
pub argument: BinaryPart,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2690,11 +2495,6 @@ impl UnaryExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.operator.digestable_id());
|
||||
hasher.update(slf.argument.compute_digest());
|
||||
});
|
||||
|
||||
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Expr) {
|
||||
self.argument.replace_value(source_range, new_value);
|
||||
}
|
||||
@ -2754,8 +2554,11 @@ pub struct PipeExpression {
|
||||
// TODO: Only the first body expression can be any Value.
|
||||
// The rest will be CallExpression, and the AST type should reflect this.
|
||||
pub body: Vec<Expr>,
|
||||
#[serde(default, skip_serializing_if = "NonCodeMeta::is_empty")]
|
||||
pub non_code_meta: NonCodeMeta,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
@ -2778,14 +2581,6 @@ impl PipeExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.body.len().to_ne_bytes());
|
||||
for value in slf.body.iter_mut() {
|
||||
hasher.update(value.compute_digest());
|
||||
}
|
||||
hasher.update(slf.non_code_meta.compute_digest());
|
||||
});
|
||||
|
||||
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Expr) {
|
||||
for value in &mut self.body {
|
||||
value.replace_value(source_range, new_value.clone());
|
||||
@ -2884,32 +2679,6 @@ pub enum FnArgType {
|
||||
},
|
||||
}
|
||||
|
||||
impl FnArgType {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
let mut hasher = Sha256::new();
|
||||
|
||||
match self {
|
||||
FnArgType::Primitive(prim) => {
|
||||
hasher.update(b"FnArgType::Primitive");
|
||||
hasher.update(prim.digestable_id())
|
||||
}
|
||||
FnArgType::Array(prim) => {
|
||||
hasher.update(b"FnArgType::Array");
|
||||
hasher.update(prim.digestable_id())
|
||||
}
|
||||
FnArgType::Object { properties } => {
|
||||
hasher.update(b"FnArgType::Object");
|
||||
hasher.update(properties.len().to_ne_bytes());
|
||||
for prop in properties.iter_mut() {
|
||||
hasher.update(prop.compute_digest());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
hasher.finalize().into()
|
||||
}
|
||||
}
|
||||
|
||||
/// Parameter of a KCL function.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS, JsonSchema, Bake)]
|
||||
#[databake(path = kcl_lib::ast::types)]
|
||||
@ -2925,25 +2694,11 @@ pub struct Parameter {
|
||||
/// Is the parameter optional?
|
||||
pub optional: bool,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl Parameter {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.identifier.compute_digest());
|
||||
match &mut slf.type_ {
|
||||
Some(arg) => {
|
||||
hasher.update(b"Parameter::type_::Some");
|
||||
hasher.update(arg.compute_digest())
|
||||
}
|
||||
None => {
|
||||
hasher.update(b"Parameter::type_::None");
|
||||
}
|
||||
}
|
||||
hasher.update(if slf.optional { [1] } else { [0] })
|
||||
});
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
|
||||
#[databake(path = kcl_lib::ast::types)]
|
||||
#[ts(export)]
|
||||
@ -2956,13 +2711,15 @@ pub struct FunctionExpression {
|
||||
#[serde(skip)]
|
||||
pub return_type: Option<FnArgType>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(FunctionExpression);
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct RequiredParamAfterOptionalParam(pub Parameter);
|
||||
pub struct RequiredParamAfterOptionalParam(pub Box<Parameter>);
|
||||
|
||||
impl std::fmt::Display for RequiredParamAfterOptionalParam {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
@ -2978,23 +2735,6 @@ impl FunctionExpression {
|
||||
}
|
||||
}
|
||||
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.params.len().to_ne_bytes());
|
||||
for param in slf.params.iter_mut() {
|
||||
hasher.update(param.compute_digest());
|
||||
}
|
||||
hasher.update(slf.body.compute_digest());
|
||||
match &mut slf.return_type {
|
||||
Some(rt) => {
|
||||
hasher.update(b"FunctionExpression::return_type::Some");
|
||||
hasher.update(rt.compute_digest());
|
||||
}
|
||||
None => {
|
||||
hasher.update(b"FunctionExpression::return_type::None");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
pub fn required_and_optional_params(
|
||||
&self,
|
||||
) -> Result<(&[Parameter], &[Parameter]), RequiredParamAfterOptionalParam> {
|
||||
@ -3011,7 +2751,7 @@ impl FunctionExpression {
|
||||
if param.optional {
|
||||
found_optional = true;
|
||||
} else if found_optional {
|
||||
return Err(RequiredParamAfterOptionalParam(param.clone()));
|
||||
return Err(RequiredParamAfterOptionalParam(Box::new(param.clone())));
|
||||
}
|
||||
}
|
||||
let boundary = self.params.partition_point(|param| !param.optional);
|
||||
@ -3059,17 +2799,13 @@ pub struct ReturnStatement {
|
||||
pub end: usize,
|
||||
pub argument: Expr,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
#[ts(optional)]
|
||||
pub digest: Option<Digest>,
|
||||
}
|
||||
|
||||
impl_value_meta!(ReturnStatement);
|
||||
|
||||
impl ReturnStatement {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.argument.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
/// Describes information about a hover.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
|
@ -1,13 +1,6 @@
|
||||
use crate::errors::KclError;
|
||||
use crate::executor::BodyType;
|
||||
use crate::executor::ExecState;
|
||||
use crate::executor::ExecutorContext;
|
||||
use crate::executor::KclValue;
|
||||
use crate::executor::Metadata;
|
||||
use crate::executor::SourceRange;
|
||||
use crate::executor::StatementKind;
|
||||
|
||||
use super::compute_digest;
|
||||
use super::impl_value_meta;
|
||||
use super::ConstraintLevel;
|
||||
use super::Hover;
|
||||
@ -15,7 +8,6 @@ use super::{Digest, Expr};
|
||||
use databake::*;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha2::{Digest as DigestTrait, Sha256};
|
||||
|
||||
// TODO: This should be its own type, similar to Program,
|
||||
// but guaranteed to have an Expression as its final item.
|
||||
@ -56,14 +48,6 @@ impl_value_meta!(IfExpression);
|
||||
impl_value_meta!(ElseIf);
|
||||
|
||||
impl IfExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.cond.compute_digest());
|
||||
hasher.update(slf.then_val.compute_digest());
|
||||
for else_if in &mut slf.else_ifs {
|
||||
hasher.update(else_if.compute_digest());
|
||||
}
|
||||
hasher.update(slf.final_else.compute_digest());
|
||||
});
|
||||
fn source_ranges(&self) -> Vec<SourceRange> {
|
||||
vec![SourceRange::from(self)]
|
||||
}
|
||||
@ -101,63 +85,12 @@ impl From<&ElseIf> for Metadata {
|
||||
}
|
||||
|
||||
impl ElseIf {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.cond.compute_digest());
|
||||
hasher.update(slf.then_val.compute_digest());
|
||||
});
|
||||
#[allow(dead_code)]
|
||||
fn source_ranges(&self) -> Vec<SourceRange> {
|
||||
vec![SourceRange([self.start, self.end])]
|
||||
}
|
||||
}
|
||||
|
||||
// Execution
|
||||
|
||||
impl IfExpression {
|
||||
#[async_recursion::async_recursion]
|
||||
pub async fn get_result(&self, exec_state: &mut ExecState, ctx: &ExecutorContext) -> Result<KclValue, KclError> {
|
||||
// Check the `if` branch.
|
||||
let cond = ctx
|
||||
.execute_expr(&self.cond, exec_state, &Metadata::from(self), StatementKind::Expression)
|
||||
.await?
|
||||
.get_bool()?;
|
||||
if cond {
|
||||
let block_result = ctx.inner_execute(&self.then_val, exec_state, BodyType::Block).await?;
|
||||
// Block must end in an expression, so this has to be Some.
|
||||
// Enforced by the parser.
|
||||
// See https://github.com/KittyCAD/modeling-app/issues/4015
|
||||
return Ok(block_result.unwrap());
|
||||
}
|
||||
|
||||
// Check any `else if` branches.
|
||||
for else_if in &self.else_ifs {
|
||||
let cond = ctx
|
||||
.execute_expr(
|
||||
&else_if.cond,
|
||||
exec_state,
|
||||
&Metadata::from(self),
|
||||
StatementKind::Expression,
|
||||
)
|
||||
.await?
|
||||
.get_bool()?;
|
||||
if cond {
|
||||
let block_result = ctx
|
||||
.inner_execute(&else_if.then_val, exec_state, BodyType::Block)
|
||||
.await?;
|
||||
// Block must end in an expression, so this has to be Some.
|
||||
// Enforced by the parser.
|
||||
// See https://github.com/KittyCAD/modeling-app/issues/4015
|
||||
return Ok(block_result.unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
// Run the final `else` branch.
|
||||
ctx.inner_execute(&self.final_else, exec_state, BodyType::Block)
|
||||
.await
|
||||
.map(|expr| expr.unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
// IDE support and refactors
|
||||
|
||||
impl IfExpression {
|
||||
@ -208,8 +141,3 @@ impl ElseIf {
|
||||
self.then_val.rename_identifiers(old_name, new_name);
|
||||
}
|
||||
}
|
||||
|
||||
// Linting
|
||||
|
||||
impl IfExpression {}
|
||||
impl ElseIf {}
|
||||
391
src/wasm-lib/kcl/src/ast/types/digest.rs
Normal file
@ -0,0 +1,391 @@
use sha2::{Digest as DigestTrait, Sha256};
|
||||
|
||||
use super::{
|
||||
ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, ElseIf, Expr,
|
||||
ExpressionStatement, FnArgType, FunctionExpression, Identifier, IfExpression, ImportItem, ImportStatement, Literal,
|
||||
LiteralIdentifier, MemberExpression, MemberObject, NonCodeMeta, NonCodeNode, NonCodeValue, ObjectExpression,
|
||||
ObjectProperty, Parameter, PipeExpression, PipeSubstitution, Program, ReturnStatement, TagDeclarator,
|
||||
UnaryExpression, VariableDeclaration, VariableDeclarator,
|
||||
};
|
||||
|
||||
/// Position-independent digest of the AST node.
|
||||
pub type Digest = [u8; 32];
|
||||
|
||||
macro_rules! compute_digest {
|
||||
(|$slf:ident, $hasher:ident| $body:block) => {
|
||||
/// Compute a digest over the AST node.
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
if let Some(node_digest) = self.digest {
|
||||
return node_digest;
|
||||
}
|
||||
|
||||
let mut $hasher = Sha256::new();
|
||||
|
||||
#[allow(unused_mut)]
|
||||
let mut $slf = self;
|
||||
|
||||
$hasher.update(std::any::type_name::<Self>());
|
||||
|
||||
$body
|
||||
|
||||
let node_digest: Digest = $hasher.finalize().into();
|
||||
$slf.digest = Some(node_digest);
|
||||
node_digest
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl ImportItem {
|
||||
compute_digest!(|slf, hasher| {
|
||||
let name = slf.name.name.as_bytes();
|
||||
hasher.update(name.len().to_ne_bytes());
|
||||
hasher.update(name);
|
||||
if let Some(alias) = &mut slf.alias {
|
||||
hasher.update([1]);
|
||||
hasher.update(alias.compute_digest());
|
||||
} else {
|
||||
hasher.update([0]);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
impl ImportStatement {
|
||||
compute_digest!(|slf, hasher| {
|
||||
for item in &mut slf.items {
|
||||
hasher.update(item.compute_digest());
|
||||
}
|
||||
let path = slf.path.as_bytes();
|
||||
hasher.update(path.len().to_ne_bytes());
|
||||
hasher.update(path);
|
||||
});
|
||||
}
|
||||
|
||||
impl Program {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.body.len().to_ne_bytes());
|
||||
for body_item in slf.body.iter_mut() {
|
||||
hasher.update(body_item.compute_digest());
|
||||
}
|
||||
hasher.update(slf.non_code_meta.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl BodyItem {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
BodyItem::ImportStatement(s) => s.compute_digest(),
|
||||
BodyItem::ExpressionStatement(es) => es.compute_digest(),
|
||||
BodyItem::VariableDeclaration(vs) => vs.compute_digest(),
|
||||
BodyItem::ReturnStatement(rs) => rs.compute_digest(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Expr {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
Expr::Literal(lit) => lit.compute_digest(),
|
||||
Expr::Identifier(id) => id.compute_digest(),
|
||||
Expr::TagDeclarator(tag) => tag.compute_digest(),
|
||||
Expr::BinaryExpression(be) => be.compute_digest(),
|
||||
Expr::FunctionExpression(fe) => fe.compute_digest(),
|
||||
Expr::CallExpression(ce) => ce.compute_digest(),
|
||||
Expr::PipeExpression(pe) => pe.compute_digest(),
|
||||
Expr::PipeSubstitution(ps) => ps.compute_digest(),
|
||||
Expr::ArrayExpression(ae) => ae.compute_digest(),
|
||||
Expr::ArrayRangeExpression(are) => are.compute_digest(),
|
||||
Expr::ObjectExpression(oe) => oe.compute_digest(),
|
||||
Expr::MemberExpression(me) => me.compute_digest(),
|
||||
Expr::UnaryExpression(ue) => ue.compute_digest(),
|
||||
Expr::IfExpression(e) => e.compute_digest(),
|
||||
Expr::None(_) => {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(b"Value::None");
|
||||
hasher.finalize().into()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BinaryPart {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
BinaryPart::Literal(lit) => lit.compute_digest(),
|
||||
BinaryPart::Identifier(id) => id.compute_digest(),
|
||||
BinaryPart::BinaryExpression(be) => be.compute_digest(),
|
||||
BinaryPart::CallExpression(ce) => ce.compute_digest(),
|
||||
BinaryPart::UnaryExpression(ue) => ue.compute_digest(),
|
||||
BinaryPart::MemberExpression(me) => me.compute_digest(),
|
||||
BinaryPart::IfExpression(e) => e.compute_digest(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MemberObject {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
MemberObject::MemberExpression(me) => me.compute_digest(),
|
||||
MemberObject::Identifier(id) => id.compute_digest(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LiteralIdentifier {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
match self {
|
||||
LiteralIdentifier::Identifier(id) => id.compute_digest(),
|
||||
LiteralIdentifier::Literal(lit) => lit.compute_digest(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl FnArgType {
|
||||
pub fn compute_digest(&mut self) -> Digest {
|
||||
let mut hasher = Sha256::new();
|
||||
|
||||
match self {
|
||||
FnArgType::Primitive(prim) => {
|
||||
hasher.update(b"FnArgType::Primitive");
|
||||
hasher.update(prim.digestable_id())
|
||||
}
|
||||
FnArgType::Array(prim) => {
|
||||
hasher.update(b"FnArgType::Array");
|
||||
hasher.update(prim.digestable_id())
|
||||
}
|
||||
FnArgType::Object { properties } => {
|
||||
hasher.update(b"FnArgType::Object");
|
||||
hasher.update(properties.len().to_ne_bytes());
|
||||
for prop in properties.iter_mut() {
|
||||
hasher.update(prop.compute_digest());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
hasher.finalize().into()
|
||||
}
|
||||
}
|
||||
impl Parameter {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.identifier.compute_digest());
|
||||
match &mut slf.type_ {
|
||||
Some(arg) => {
|
||||
hasher.update(b"Parameter::type_::Some");
|
||||
hasher.update(arg.compute_digest())
|
||||
}
|
||||
None => {
|
||||
hasher.update(b"Parameter::type_::None");
|
||||
}
|
||||
}
|
||||
hasher.update(if slf.optional { [1] } else { [0] })
|
||||
});
|
||||
}
|
||||
|
||||
impl FunctionExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.params.len().to_ne_bytes());
|
||||
for param in slf.params.iter_mut() {
|
||||
hasher.update(param.compute_digest());
|
||||
}
|
||||
hasher.update(slf.body.compute_digest());
|
||||
match &mut slf.return_type {
|
||||
Some(rt) => {
|
||||
hasher.update(b"FunctionExpression::return_type::Some");
|
||||
hasher.update(rt.compute_digest());
|
||||
}
|
||||
None => {
|
||||
hasher.update(b"FunctionExpression::return_type::None");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
impl ReturnStatement {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.argument.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl NonCodeNode {
|
||||
compute_digest!(|slf, hasher| {
|
||||
match &slf.value {
|
||||
NonCodeValue::Shebang { value } => {
|
||||
hasher.update(value);
|
||||
}
|
||||
NonCodeValue::InlineComment { value, style } => {
|
||||
hasher.update(value);
|
||||
hasher.update(style.digestable_id());
|
||||
}
|
||||
NonCodeValue::BlockComment { value, style } => {
|
||||
hasher.update(value);
|
||||
hasher.update(style.digestable_id());
|
||||
}
|
||||
NonCodeValue::NewLineBlockComment { value, style } => {
|
||||
hasher.update(value);
|
||||
hasher.update(style.digestable_id());
|
||||
}
|
||||
NonCodeValue::NewLine => {
|
||||
hasher.update(b"\r\n");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
impl NonCodeMeta {
|
||||
compute_digest!(|slf, hasher| {
|
||||
let mut keys = slf.non_code_nodes.keys().copied().collect::<Vec<_>>();
|
||||
keys.sort();
|
||||
|
||||
for key in keys.into_iter() {
|
||||
hasher.update(key.to_ne_bytes());
|
||||
let nodes = slf.non_code_nodes.get_mut(&key).unwrap();
|
||||
hasher.update(nodes.len().to_ne_bytes());
|
||||
for node in nodes.iter_mut() {
|
||||
hasher.update(node.compute_digest());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
impl ExpressionStatement {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.expression.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl VariableDeclaration {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.declarations.len().to_ne_bytes());
|
||||
for declarator in &mut slf.declarations {
|
||||
hasher.update(declarator.compute_digest());
|
||||
}
|
||||
hasher.update(slf.visibility.digestable_id());
|
||||
hasher.update(slf.kind.digestable_id());
|
||||
});
|
||||
}
|
||||
|
||||
impl VariableDeclarator {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.id.compute_digest());
|
||||
hasher.update(slf.init.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl Literal {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.value.digestable_id());
|
||||
});
|
||||
}
|
||||
|
||||
impl Identifier {
|
||||
compute_digest!(|slf, hasher| {
|
||||
let name = slf.name.as_bytes();
|
||||
hasher.update(name.len().to_ne_bytes());
|
||||
hasher.update(name);
|
||||
});
|
||||
}
|
||||
|
||||
impl TagDeclarator {
|
||||
compute_digest!(|slf, hasher| {
|
||||
let name = slf.name.as_bytes();
|
||||
hasher.update(name.len().to_ne_bytes());
|
||||
hasher.update(name);
|
||||
});
|
||||
}
|
||||
|
||||
impl PipeSubstitution {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(b"PipeSubstitution");
|
||||
});
|
||||
}
|
||||
|
||||
impl ArrayExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.elements.len().to_ne_bytes());
|
||||
for value in slf.elements.iter_mut() {
|
||||
hasher.update(value.compute_digest());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
impl ArrayRangeExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.start_element.compute_digest());
|
||||
hasher.update(slf.end_element.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl ObjectExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.properties.len().to_ne_bytes());
|
||||
for prop in slf.properties.iter_mut() {
|
||||
hasher.update(prop.compute_digest());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
impl ObjectProperty {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.key.compute_digest());
|
||||
hasher.update(slf.value.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl MemberExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.object.compute_digest());
|
||||
hasher.update(slf.property.compute_digest());
|
||||
hasher.update(if slf.computed { [1] } else { [0] });
|
||||
});
|
||||
}
|
||||
|
||||
impl BinaryExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.operator.digestable_id());
|
||||
hasher.update(slf.left.compute_digest());
|
||||
hasher.update(slf.right.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl UnaryExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.operator.digestable_id());
|
||||
hasher.update(slf.argument.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl PipeExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.body.len().to_ne_bytes());
|
||||
for value in slf.body.iter_mut() {
|
||||
hasher.update(value.compute_digest());
|
||||
}
|
||||
hasher.update(slf.non_code_meta.compute_digest());
|
||||
});
|
||||
}
|
||||
|
||||
impl CallExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.callee.compute_digest());
|
||||
hasher.update(slf.arguments.len().to_ne_bytes());
|
||||
for argument in slf.arguments.iter_mut() {
|
||||
hasher.update(argument.compute_digest());
|
||||
}
|
||||
hasher.update(if slf.optional { [1] } else { [0] });
|
||||
});
|
||||
}
|
||||
|
||||
impl IfExpression {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.cond.compute_digest());
|
||||
hasher.update(slf.then_val.compute_digest());
|
||||
for else_if in &mut slf.else_ifs {
|
||||
hasher.update(else_if.compute_digest());
|
||||
}
|
||||
hasher.update(slf.final_else.compute_digest());
|
||||
});
|
||||
}
|
||||
impl ElseIf {
|
||||
compute_digest!(|slf, hasher| {
|
||||
hasher.update(slf.cond.compute_digest());
|
||||
hasher.update(slf.then_val.compute_digest());
|
||||
});
|
||||
}
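The new digest.rs centralizes a cached, position-independent hash for every AST node: start/end offsets are never fed into the hasher, only structure and names. A minimal sketch of the property this buys, assuming two already-parsed Program values (only items visible in this diff are used):

    use kcl_lib::ast::types::Program;

    // Minimal sketch: compute_digest() caches into the node's `digest` field and
    // never hashes start/end offsets, so a reformatted copy of a program yields
    // the same 32-byte digest as the original.
    fn structurally_equal(mut a: Program, mut b: Program) -> bool {
        a.compute_digest() == b.compute_digest()
    }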
@ -1,7 +1,7 @@
use super::{
|
||||
human_friendly_type, ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryOperator, BinaryPart,
|
||||
CallExpression, Expr, LiteralIdentifier, LiteralValue, MemberExpression, MemberObject, ObjectExpression,
|
||||
TagDeclarator, UnaryExpression, UnaryOperator,
|
||||
CallExpression, Expr, IfExpression, LiteralIdentifier, LiteralValue, MemberExpression, MemberObject,
|
||||
ObjectExpression, TagDeclarator, UnaryExpression, UnaryOperator,
|
||||
};
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
@ -65,93 +65,9 @@ impl MemberExpression {
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_result(&self, exec_state: &mut ExecState) -> Result<KclValue, KclError> {
|
||||
#[derive(Debug)]
|
||||
enum Property {
|
||||
Number(usize),
|
||||
String(String),
|
||||
}
|
||||
|
||||
impl Property {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Property::Number(_) => "number",
|
||||
Property::String(_) => "string",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let property_src: SourceRange = self.property.clone().into();
|
||||
let property_sr = vec![property_src];
|
||||
|
||||
let property: Property = match self.property.clone() {
|
||||
LiteralIdentifier::Identifier(identifier) => {
|
||||
let name = identifier.name;
|
||||
if !self.computed {
|
||||
// Treat the property as a literal
|
||||
Property::String(name.to_string())
|
||||
} else {
|
||||
// Actually evaluate memory to compute the property.
|
||||
let prop = exec_state.memory.get(&name, property_src)?;
|
||||
let KclValue::UserVal(prop) = prop else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message: format!(
|
||||
"{name} is not a valid property/index, you can only use a string or int (>= 0) here",
|
||||
),
|
||||
}));
|
||||
};
|
||||
match prop.value {
|
||||
JValue::Number(ref num) => {
|
||||
num
|
||||
.as_u64()
|
||||
.and_then(|x| usize::try_from(x).ok())
|
||||
.map(Property::Number)
|
||||
.ok_or_else(|| {
|
||||
KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message: format!(
|
||||
"{name}'s value is not a valid property/index, you can only use a string or int (>= 0) here",
|
||||
),
|
||||
})
|
||||
})?
|
||||
}
|
||||
JValue::String(ref x) => Property::String(x.to_owned()),
|
||||
_ => {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message: format!(
|
||||
"{name} is not a valid property/index, you can only use a string to get the property of an object, or an int (>= 0) to get an item in an array",
|
||||
),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
LiteralIdentifier::Literal(literal) => {
|
||||
let value = literal.value.clone();
|
||||
match value {
|
||||
LiteralValue::IInteger(x) => {
|
||||
if let Ok(x) = u64::try_from(x) {
|
||||
Property::Number(x.try_into().unwrap())
|
||||
} else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message: format!("{x} is not a valid index, indices must be whole numbers >= 0"),
|
||||
}));
|
||||
}
|
||||
}
|
||||
LiteralValue::String(s) => Property::String(s),
|
||||
_ => {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: vec![self.into()],
|
||||
message: "Only strings or ints (>= 0) can be properties/indexes".to_owned(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let property = Property::try_from(self.computed, self.property.clone(), exec_state, self.into())?;
|
||||
let object = match &self.object {
|
||||
// TODO: Don't use recursion here, use a loop.
|
||||
MemberObject::MemberExpression(member_expr) => member_expr.get_result(exec_state)?,
|
||||
@ -635,13 +551,13 @@ impl ArrayRangeExpression {
|
||||
.execute_expr(&self.start_element, exec_state, &metadata, StatementKind::Expression)
|
||||
.await?
|
||||
.get_json_value()?;
|
||||
let start = parse_json_number_as_u64(&start, (&*self.start_element).into())?;
|
||||
let start = parse_json_number_as_i64(&start, (&*self.start_element).into())?;
|
||||
let metadata = Metadata::from(&*self.end_element);
|
||||
let end = ctx
|
||||
.execute_expr(&self.end_element, exec_state, &metadata, StatementKind::Expression)
|
||||
.await?
|
||||
.get_json_value()?;
|
||||
let end = parse_json_number_as_u64(&end, (&*self.end_element).into())?;
|
||||
let end = parse_json_number_as_i64(&end, (&*self.end_element).into())?;
|
||||
|
||||
if end < start {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
@ -687,9 +603,9 @@ impl ObjectExpression {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_json_number_as_u64(j: &serde_json::Value, source_range: SourceRange) -> Result<u64, KclError> {
|
||||
fn parse_json_number_as_i64(j: &serde_json::Value, source_range: SourceRange) -> Result<i64, KclError> {
|
||||
if let serde_json::Value::Number(n) = &j {
|
||||
n.as_u64().ok_or_else(|| {
|
||||
n.as_i64().ok_or_else(|| {
|
||||
KclError::Syntax(KclErrorDetails {
|
||||
source_ranges: vec![source_range],
|
||||
message: format!("Invalid integer: {}", j),
|
||||
@ -738,3 +654,150 @@ pub fn json_as_bool(j: &serde_json::Value) -> Option<bool> {
|
||||
JValue::Object(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
impl IfExpression {
|
||||
#[async_recursion]
|
||||
pub async fn get_result(&self, exec_state: &mut ExecState, ctx: &ExecutorContext) -> Result<KclValue, KclError> {
|
||||
// Check the `if` branch.
|
||||
let cond = ctx
|
||||
.execute_expr(&self.cond, exec_state, &Metadata::from(self), StatementKind::Expression)
|
||||
.await?
|
||||
.get_bool()?;
|
||||
if cond {
|
||||
let block_result = ctx.inner_execute(&self.then_val, exec_state, BodyType::Block).await?;
|
||||
// Block must end in an expression, so this has to be Some.
|
||||
// Enforced by the parser.
|
||||
// See https://github.com/KittyCAD/modeling-app/issues/4015
|
||||
return Ok(block_result.unwrap());
|
||||
}
|
||||
|
||||
// Check any `else if` branches.
|
||||
for else_if in &self.else_ifs {
|
||||
let cond = ctx
|
||||
.execute_expr(
|
||||
&else_if.cond,
|
||||
exec_state,
|
||||
&Metadata::from(self),
|
||||
StatementKind::Expression,
|
||||
)
|
||||
.await?
|
||||
.get_bool()?;
|
||||
if cond {
|
||||
let block_result = ctx
|
||||
.inner_execute(&else_if.then_val, exec_state, BodyType::Block)
|
||||
.await?;
|
||||
// Block must end in an expression, so this has to be Some.
|
||||
// Enforced by the parser.
|
||||
// See https://github.com/KittyCAD/modeling-app/issues/4015
|
||||
return Ok(block_result.unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
// Run the final `else` branch.
|
||||
ctx.inner_execute(&self.final_else, exec_state, BodyType::Block)
|
||||
.await
|
||||
.map(|expr| expr.unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum Property {
|
||||
Number(usize),
|
||||
String(String),
|
||||
}
|
||||
|
||||
impl Property {
|
||||
fn try_from(
|
||||
computed: bool,
|
||||
value: LiteralIdentifier,
|
||||
exec_state: &ExecState,
|
||||
sr: SourceRange,
|
||||
) -> Result<Self, KclError> {
|
||||
let property_sr = vec![sr];
|
||||
let property_src: SourceRange = value.clone().into();
|
||||
match value {
|
||||
LiteralIdentifier::Identifier(identifier) => {
|
||||
let name = identifier.name;
|
||||
if !computed {
|
||||
// Treat the property as a literal
|
||||
Ok(Property::String(name.to_string()))
|
||||
} else {
|
||||
// Actually evaluate memory to compute the property.
|
||||
let prop = exec_state.memory.get(&name, property_src)?;
|
||||
let KclValue::UserVal(prop) = prop else {
|
||||
return Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message: format!(
|
||||
"{name} is not a valid property/index, you can only use a string or int (>= 0) here",
|
||||
),
|
||||
}));
|
||||
};
|
||||
jvalue_to_prop(&prop.value, property_sr, &name)
|
||||
}
|
||||
}
|
||||
LiteralIdentifier::Literal(literal) => {
|
||||
let value = literal.value.clone();
|
||||
match value {
|
||||
LiteralValue::IInteger(x) => {
|
||||
if let Ok(x) = u64::try_from(x) {
|
||||
Ok(Property::Number(x.try_into().unwrap()))
|
||||
} else {
|
||||
Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message: format!("{x} is not a valid index, indices must be whole numbers >= 0"),
|
||||
}))
|
||||
}
|
||||
}
|
||||
LiteralValue::String(s) => Ok(Property::String(s)),
|
||||
_ => Err(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: vec![sr],
|
||||
message: "Only strings or ints (>= 0) can be properties/indexes".to_owned(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn jvalue_to_prop(value: &JValue, property_sr: Vec<SourceRange>, name: &str) -> Result<Property, KclError> {
|
||||
let make_err = |message: String| {
|
||||
Err::<Property, _>(KclError::Semantic(KclErrorDetails {
|
||||
source_ranges: property_sr,
|
||||
message,
|
||||
}))
|
||||
};
|
||||
const MUST_BE_POSINT: &str = "indices must be whole positive numbers";
|
||||
const TRY_INT: &str = "try using the int() function to make this a whole number";
|
||||
match value {
|
||||
JValue::Number(ref num) => {
|
||||
let maybe_uint = num.as_u64().and_then(|x| usize::try_from(x).ok());
|
||||
if let Some(uint) = maybe_uint {
|
||||
Ok(Property::Number(uint))
|
||||
} else if let Some(iint) = num.as_i64() {
|
||||
make_err(format!("'{iint}' is not a valid index, {MUST_BE_POSINT}"))
|
||||
} else if let Some(fnum) = num.as_f64() {
|
||||
if fnum < 0.0 {
|
||||
make_err(format!("'{fnum}' is not a valid index, {MUST_BE_POSINT}"))
|
||||
} else if fnum.fract() == 0.0 {
|
||||
make_err(format!("'{fnum:.1}' is stored as a fractional number but indices must be whole numbers, {TRY_INT}"))
|
||||
} else {
|
||||
make_err(format!("'{fnum}' is not a valid index, {MUST_BE_POSINT}, {TRY_INT}"))
|
||||
}
|
||||
} else {
|
||||
make_err(format!("'{num}' is not a valid index, {MUST_BE_POSINT}"))
|
||||
}
|
||||
}
|
||||
JValue::String(ref x) => Ok(Property::String(x.to_owned())),
|
||||
_ => {
|
||||
make_err(format!("{name} is not a valid property/index, you can only use a string to get the property of an object, or an int (>= 0) to get an item in an array"))
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Property {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Property::Number(_) => "number",
|
||||
Property::String(_) => "string",
|
||||
}
|
||||
}
|
||||
}
|
||||
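The refactor above moves property resolution out of MemberExpression::get_result into Property::try_from and jvalue_to_prop, so literal and computed indexes now share one validation path and one set of error messages. A hedged sketch of the behavior, written as a hypothetical unit test inside execute.rs (jvalue_to_prop is private to this module):

    #[test]
    fn fractional_index_is_rejected() {
        // 2.5 has no u64/i64 form, is non-negative, and has a non-zero fractional
        // part, so it falls through to the "not a valid index, indices must be
        // whole positive numbers" branch of jvalue_to_prop.
        let idx = serde_json::json!(2.5);
        assert!(jvalue_to_prop(&idx, vec![], "myIndex").is_err());
    }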
57
src/wasm-lib/kcl/src/engine/engine_utils.rs
Normal file
@ -0,0 +1,57 @@
//! Functions for calling into the engine-utils library (a set of C++ utilities containing various logic for client-side CAD processing)
|
||||
//! Note that this binary may not be available to all builds of kcl, so fallbacks that call the engine API should be implemented
|
||||
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
std::Args,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use std::ffi::{CString, CStr};
|
||||
use kittycad_modeling_cmds::{length_unit::LengthUnit, shared::Point3d};
|
||||
|
||||
mod cpp {
|
||||
use std::os::raw::c_char;
|
||||
|
||||
extern "C" {
|
||||
pub fn kcEngineUtilsEvaluatePath(sketch: *const c_char, t: f64) -> *const c_char;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pub fn is_available() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
pub async fn get_true_path_end_pos(sketch: String, args: &Args) -> Result<Point3d<LengthUnit>, KclError> {
|
||||
let c_string = CString::new(sketch).map_err(|e| {
|
||||
KclError::Internal(KclErrorDetails {
|
||||
message: format!("{:?}", e),
|
||||
source_ranges: vec![args.source_range],
|
||||
})
|
||||
})?;
|
||||
let arg = c_string.into_raw();
|
||||
let result_string: String;
|
||||
|
||||
unsafe {
|
||||
let result = cpp::kcEngineUtilsEvaluatePath(arg, 1.0);
|
||||
let result_cstr = CStr::from_ptr(result);
|
||||
let str_slice: &str = result_cstr.to_str().map_err(|e| {
|
||||
KclError::Internal(KclErrorDetails {
|
||||
message: format!("{:?}", e),
|
||||
source_ranges: vec![args.source_range],
|
||||
})
|
||||
})?;
|
||||
let str_buf: String = str_slice.to_owned();
|
||||
result_string = str_buf.clone();
|
||||
let _ = CString::from_raw(arg);
|
||||
}
|
||||
|
||||
let point: Point3d<f64> = serde_json::from_str(&result_string).map_err(|e| {
|
||||
KclError::Type(KclErrorDetails {
|
||||
message: format!("Failed to path position from json: {}", e),
|
||||
source_ranges: vec![args.source_range],
|
||||
})
|
||||
})?;
|
||||
|
||||
Ok(Point3d::<f64>::from(point).map(LengthUnit))
|
||||
}
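The native path hands the sketch JSON across the C FFI and expects the C++ side to return the evaluated point as JSON that deserializes into Point3d<f64>. A hedged sketch of that contract; the payload below is illustrative and only assumes Point3d's plain {x, y, z} field layout, which the serde_json::from_str call above also relies on:

    use kittycad_modeling_cmds::shared::Point3d;

    // Illustrative check of the expected FFI return shape.
    fn parse_example() -> Point3d<f64> {
        let payload = r#"{"x": 1.0, "y": 2.0, "z": 0.0}"#; // hypothetical payload
        serde_json::from_str(payload).expect("Point3d deserializes from {x, y, z}")
    }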
35
src/wasm-lib/kcl/src/engine/engine_utils_api.rs
Normal file
@ -0,0 +1,35 @@
//! Functions for calling into the engine-utils library (a set of C++ utilities containing various logic for client-side CAD processing)
|
||||
//! Note that this binary may not be available to all builds of kcl, so fallbacks that call the engine API should be implemented
|
||||
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
std::Args,
|
||||
};
|
||||
use crate::engine::kcmc::{each_cmd as mcmd, ModelingCmd};
|
||||
use anyhow::Result;
|
||||
use kittycad_modeling_cmds::{length_unit::LengthUnit, ok_response::OkModelingCmdResponse, shared::Point3d, websocket::OkWebSocketResponseData};
|
||||
|
||||
pub fn is_available() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
pub async fn get_true_path_end_pos(sketch: String, args: &Args) -> Result<Point3d<LengthUnit>, KclError> {
|
||||
let id = uuid::Uuid::new_v4();
|
||||
|
||||
let resp = args.send_modeling_cmd(id, ModelingCmd::from(mcmd::EngineUtilEvaluatePath {
|
||||
path_json: sketch,
|
||||
t: 1.0,
|
||||
})).await?;
|
||||
|
||||
let OkWebSocketResponseData::Modeling {
|
||||
modeling_response: OkModelingCmdResponse::EngineUtilEvaluatePath(point),
|
||||
} = &resp
|
||||
else {
|
||||
return Err(KclError::Engine(KclErrorDetails {
|
||||
message: format!("mcmd::EngineUtilEvaluatePath response was not as expected: {:?}", resp),
|
||||
source_ranges: vec![args.source_range],
|
||||
}));
|
||||
};
|
||||
|
||||
Ok(point.pos)
|
||||
}
56
src/wasm-lib/kcl/src/engine/engine_utils_wasm.rs
Normal file
@ -0,0 +1,56 @@
//! Functions for calling into the engine-utils library (a set of C++ utilities containing various logic for client-side CAD processing)
|
||||
//! Note that this binary may not be available to all builds of kcl, so fallbacks that call the engine API should be implemented
|
||||
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
std::Args,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use kittycad_modeling_cmds::{length_unit::LengthUnit, shared::Point3d};
|
||||
mod cpp {
|
||||
use wasm_bindgen::prelude::wasm_bindgen;
|
||||
|
||||
#[wasm_bindgen(module = "/../../lib/engineUtils.ts")]
|
||||
extern "C" {
|
||||
#[wasm_bindgen(js_name = getTruePathEndPos, catch)]
|
||||
pub fn get_true_path_end_pos(sketch: String) -> Result<js_sys::Promise, js_sys::Error>;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_available() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn call_cpp<F>(args: &Args, f: F) -> Result<String, KclError>
|
||||
where
|
||||
F: FnOnce() -> Result<js_sys::Promise, js_sys::Error>,
|
||||
{
|
||||
let promise = f().map_err(|e| {
|
||||
KclError::Internal(KclErrorDetails {
|
||||
message: format!("{:?}", e),
|
||||
source_ranges: vec![args.source_range],
|
||||
})
|
||||
})?;
|
||||
|
||||
let result = crate::wasm::JsFuture::from(promise).await.map_err(|e| {
|
||||
KclError::Internal(KclErrorDetails {
|
||||
message: format!("{:?}", e),
|
||||
source_ranges: vec![args.source_range],
|
||||
})
|
||||
})?;
|
||||
|
||||
Ok(result.as_string().unwrap_or_default())
|
||||
}
|
||||
|
||||
pub async fn get_true_path_end_pos(sketch: String, args: &Args) -> Result<Point3d<LengthUnit>, KclError> {
|
||||
let result_str = call_cpp(args, || cpp::get_true_path_end_pos(sketch.into())).await?;
|
||||
|
||||
let point: Point3d<f64> = serde_json::from_str(&result_str).map_err(|e| {
|
||||
KclError::Type(KclErrorDetails {
|
||||
message: format!("Failed to path position from json: {}", e),
|
||||
source_ranges: vec![args.source_range],
|
||||
})
|
||||
})?;
|
||||
|
||||
Ok(Point3d::<f64>::from(point).map(LengthUnit))
|
||||
}
@ -8,6 +8,17 @@ pub mod conn_mock;
#[cfg(feature = "engine")]
|
||||
pub mod conn_wasm;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(feature = "native-engine-utils")]
|
||||
pub mod engine_utils;
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(feature = "wasm-engine-utils")]
|
||||
pub mod engine_utils_wasm;
|
||||
|
||||
#[cfg(feature = "engine")]
|
||||
#[cfg(any(not(target_arch = "wasm32"), all(not(feature = "native-engine-utils"), not(feature = "wasm-engine-utils"))))]
|
||||
pub mod engine_utils_api;
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Arc, Mutex},
|
||||
|
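All three backends expose the same two functions (is_available and get_true_path_end_pos), so call sites can alias whichever module the build compiled in. A hedged sketch of that pattern; the alias name and exact cfg combinations below are illustrative, not taken from this diff:

    // Hypothetical consumer-side alias mirroring the module gating above.
    #[cfg(all(not(target_arch = "wasm32"), feature = "native-engine-utils"))]
    use crate::engine::engine_utils as path_utils;
    #[cfg(all(target_arch = "wasm32", feature = "wasm-engine-utils"))]
    use crate::engine::engine_utils_wasm as path_utils;
    #[cfg(not(any(feature = "native-engine-utils", feature = "wasm-engine-utils")))]
    use crate::engine::engine_utils_api as path_utils;

    // Each backend then satisfies the same calls:
    //   path_utils::is_available();
    //   path_utils::get_true_path_end_pos(sketch_json, &args).await;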
@ -1601,6 +1601,19 @@ pub enum Path {
|
||||
#[serde(flatten)]
|
||||
base: BasePath,
|
||||
},
|
||||
/// An arc (only used for engine-utils arg serialization for now)
|
||||
Arc {
|
||||
#[serde(flatten)]
|
||||
base: BasePath,
|
||||
/// angle range
|
||||
#[ts(type = "[number, number]")]
|
||||
angle_range: [f64; 2],
|
||||
/// center
|
||||
#[ts(type = "[number, number]")]
|
||||
center: [f64; 2],
|
||||
/// the arc's radius
|
||||
radius: f64,
|
||||
},
|
||||
/// An arc that is tangential to the last path segment that goes to a point
|
||||
TangentialArcTo {
|
||||
#[serde(flatten)]
|
||||
@ -1620,6 +1633,10 @@ pub enum Path {
|
||||
center: [f64; 2],
|
||||
/// arc's direction
|
||||
ccw: bool,
|
||||
/// the arc's radius
|
||||
radius: f64,
|
||||
/// the arc's angle offset
|
||||
offset: f64,
|
||||
},
|
||||
// TODO: consolidate segment enums, remove Circle. https://github.com/KittyCAD/modeling-app/issues/3940
|
||||
/// a complete arc
|
||||
@ -1668,6 +1685,7 @@ impl Path {
|
||||
Path::TangentialArcTo { base, .. } => base.geo_meta.id,
|
||||
Path::TangentialArc { base, .. } => base.geo_meta.id,
|
||||
Path::Circle { base, .. } => base.geo_meta.id,
|
||||
Path::Arc { base, .. } => base.geo_meta.id,
|
||||
}
|
||||
}
|
||||
|
||||
@ -1680,6 +1698,7 @@ impl Path {
|
||||
Path::TangentialArcTo { base, .. } => base.tag.clone(),
|
||||
Path::TangentialArc { base, .. } => base.tag.clone(),
|
||||
Path::Circle { base, .. } => base.tag.clone(),
|
||||
Path::Arc { base, .. } => base.tag.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -1692,6 +1711,7 @@ impl Path {
|
||||
Path::TangentialArcTo { base, .. } => base,
|
||||
Path::TangentialArc { base, .. } => base,
|
||||
Path::Circle { base, .. } => base,
|
||||
Path::Arc { base, .. } => base,
|
||||
}
|
||||
}
|
||||
|
||||
@ -1704,6 +1724,7 @@ impl Path {
|
||||
Path::TangentialArcTo { base, .. } => Some(base),
|
||||
Path::TangentialArc { base, .. } => Some(base),
|
||||
Path::Circle { base, .. } => Some(base),
|
||||
Path::Arc { base, .. } => Some(base),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
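Editor's note: because the new Arc variant carries center, radius, and angle_range alongside its BasePath, the segment's endpoint is fully determined by those fields, which is presumably what makes the variant useful for engine-utils argument serialization. A hedged sketch of that geometry, not code from this branch; it assumes angle_range is [start, end] in degrees, so drop the to_radians() call if the field is actually in radians:

/// End position of an arc described by the new `Path::Arc` fields.
/// Assumes angle_range = [start, end] in degrees (an assumption, not stated in the diff).
fn arc_end_point(center: [f64; 2], radius: f64, angle_range: [f64; 2]) -> [f64; 2] {
    let end = angle_range[1].to_radians();
    [center[0] + radius * end.cos(), center[1] + radius * end.sin()]
}

#[test]
fn quarter_circle_ends_on_the_y_axis() {
    // Start at 0 degrees, sweep to 90 degrees on a radius-2 circle about the origin.
    let p = arc_end_point([0.0, 0.0], 2.0, [0.0, 90.0]);
    assert!(p[0].abs() < 1e-9 && (p[1] - 2.0).abs() < 1e-9);
}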
@ -1342,7 +1342,7 @@ fn declaration_keyword(i: TokenSlice) -> PResult<(VariableKind, Token)> {
}

/// Parse a variable/constant declaration.
fn declaration(i: TokenSlice) -> PResult<VariableDeclaration> {
fn declaration(i: TokenSlice) -> PResult<Box<VariableDeclaration>> {
    let (visibility, visibility_token) = opt(terminated(item_visibility, whitespace))
        .parse_next(i)?
        .map_or((ItemVisibility::Default, None), |pair| (pair.0, Some(pair.1)));
@ -1404,7 +1404,7 @@ fn declaration(i: TokenSlice) -> PResult<VariableDeclaration> {
        .map_err(|e| e.cut())?;

    let end = val.end();
    Ok(VariableDeclaration {
    Ok(Box::new(VariableDeclaration {
        start,
        end,
        declarations: vec![VariableDeclarator {
@ -1417,7 +1417,7 @@ fn declaration(i: TokenSlice) -> PResult<VariableDeclaration> {
        visibility,
        kind,
        digest: None,
    })
    }))
}

impl TryFrom<Token> for Identifier {
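Editor's note: the only behavioural change in the parser hunks above is that declaration now returns Box<VariableDeclaration>. The likely motivation (an assumption, it is not stated in this diff) is size: a VariableDeclaration is a wide struct, and boxing it keeps the parser's intermediate values, and any enum that embeds a declaration, at pointer size rather than carrying the full struct inline. A self-contained illustration with a stand-in struct:

use std::mem::size_of;

#[allow(dead_code)]
struct Wide([u8; 256]); // stand-in for a wide AST node such as VariableDeclaration

#[allow(dead_code)]
enum CarriesInline { Decl(Wide), Other(u8) }
#[allow(dead_code)]
enum CarriesBoxed { Decl(Box<Wide>), Other(u8) }

fn main() {
    // The boxed enum stays pointer-sized; the inline one pays for the full struct.
    assert!(size_of::<CarriesBoxed>() < size_of::<CarriesInline>());
    println!(
        "inline: {} bytes, boxed: {} bytes",
        size_of::<CarriesInline>(),
        size_of::<CarriesBoxed>()
    );
}

The snapshot diffs that follow are mechanical fallout from the AST changes in this branch and are left as generated.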
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -22,8 +21,6 @@ expression: actual
|
||||
"start": 4,
|
||||
"end": 5,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -22,8 +21,6 @@ expression: actual
|
||||
"start": 2,
|
||||
"end": 3,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -22,8 +21,6 @@ expression: actual
|
||||
"start": 3,
|
||||
"end": 4,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -28,8 +27,7 @@ expression: actual
|
||||
"start": 4,
|
||||
"end": 5,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -37,10 +35,7 @@ expression: actual
|
||||
"start": 8,
|
||||
"end": 9,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -28,8 +27,7 @@ expression: actual
|
||||
"start": 6,
|
||||
"end": 7,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -37,10 +35,7 @@ expression: actual
|
||||
"start": 10,
|
||||
"end": 11,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -19,8 +19,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -34,8 +33,7 @@ expression: actual
|
||||
"start": 6,
|
||||
"end": 7,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -43,12 +41,9 @@ expression: actual
|
||||
"start": 10,
|
||||
"end": 11,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -56,8 +51,6 @@ expression: actual
|
||||
"start": 16,
|
||||
"end": 17,
|
||||
"value": 4,
|
||||
"raw": "4",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "4"
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -34,8 +33,7 @@ expression: actual
|
||||
"start": 6,
|
||||
"end": 7,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -43,10 +41,8 @@ expression: actual
|
||||
"start": 10,
|
||||
"end": 11,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -54,10 +50,7 @@ expression: actual
|
||||
"start": 16,
|
||||
"end": 17,
|
||||
"value": 4,
|
||||
"raw": "4",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "4"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -40,8 +39,7 @@ expression: actual
|
||||
"start": 7,
|
||||
"end": 8,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -49,10 +47,8 @@ expression: actual
|
||||
"start": 11,
|
||||
"end": 12,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -60,10 +56,8 @@ expression: actual
|
||||
"start": 17,
|
||||
"end": 18,
|
||||
"value": 4,
|
||||
"raw": "4",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "4"
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -71,10 +65,7 @@ expression: actual
|
||||
"start": 21,
|
||||
"end": 22,
|
||||
"value": 5,
|
||||
"raw": "5",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "5"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -28,8 +27,7 @@ expression: actual
|
||||
"start": 8,
|
||||
"end": 9,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -37,10 +35,7 @@ expression: actual
|
||||
"start": 12,
|
||||
"end": 13,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -30,28 +30,23 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 0,
|
||||
"end": 8,
|
||||
"name": "distance",
|
||||
"digest": null
|
||||
"name": "distance"
|
||||
},
|
||||
"right": {
|
||||
"type": "Identifier",
|
||||
"type": "Identifier",
|
||||
"start": 11,
|
||||
"end": 12,
|
||||
"name": "p",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"name": "p"
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "Identifier",
|
||||
"type": "Identifier",
|
||||
"start": 15,
|
||||
"end": 18,
|
||||
"name": "FOS",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"name": "FOS"
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -59,10 +54,8 @@ expression: actual
|
||||
"start": 21,
|
||||
"end": 22,
|
||||
"value": 6,
|
||||
"raw": "6",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "6"
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"type": "BinaryExpression",
|
||||
@ -75,18 +68,14 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 26,
|
||||
"end": 36,
|
||||
"name": "sigmaAllow",
|
||||
"digest": null
|
||||
"name": "sigmaAllow"
|
||||
},
|
||||
"right": {
|
||||
"type": "Identifier",
|
||||
"type": "Identifier",
|
||||
"start": 39,
|
||||
"end": 44,
|
||||
"name": "width",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"name": "width"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -13,8 +13,7 @@ expression: actual
|
||||
"start": 0,
|
||||
"end": 1,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
"right": {
|
||||
"type": "Literal",
|
||||
@ -22,8 +21,6 @@ expression: actual
|
||||
"start": 7,
|
||||
"end": 8,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 15,
|
||||
"name": "boxSketch",
|
||||
"digest": null
|
||||
"name": "boxSketch"
|
||||
},
|
||||
"init": {
|
||||
"type": "PipeExpression",
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 18,
|
||||
"end": 31,
|
||||
"name": "startSketchAt",
|
||||
"digest": null
|
||||
"name": "startSketchAt"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -54,8 +52,7 @@ expression: actual
|
||||
"start": 33,
|
||||
"end": 34,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -63,15 +60,12 @@ expression: actual
|
||||
"start": 36,
|
||||
"end": 37,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -82,8 +76,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 47,
|
||||
"end": 51,
|
||||
"name": "line",
|
||||
"digest": null
|
||||
"name": "line"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -98,8 +91,7 @@ expression: actual
|
||||
"start": 53,
|
||||
"end": 54,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -107,22 +99,18 @@ expression: actual
|
||||
"start": 56,
|
||||
"end": 58,
|
||||
"value": 10,
|
||||
"raw": "10",
|
||||
"digest": null
|
||||
"raw": "10"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 61,
|
||||
"end": 62,
|
||||
"digest": null
|
||||
"end": 62
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -133,8 +121,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 71,
|
||||
"end": 84,
|
||||
"name": "tangentialArc",
|
||||
"digest": null
|
||||
"name": "tangentialArc"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -155,10 +142,8 @@ expression: actual
|
||||
"start": 87,
|
||||
"end": 88,
|
||||
"value": 5,
|
||||
"raw": "5",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -166,22 +151,18 @@ expression: actual
|
||||
"start": 90,
|
||||
"end": 91,
|
||||
"value": 5,
|
||||
"raw": "5",
|
||||
"digest": null
|
||||
"raw": "5"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 94,
|
||||
"end": 95,
|
||||
"digest": null
|
||||
"end": 95
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -192,8 +173,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 104,
|
||||
"end": 108,
|
||||
"name": "line",
|
||||
"digest": null
|
||||
"name": "line"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -208,8 +188,7 @@ expression: actual
|
||||
"start": 110,
|
||||
"end": 111,
|
||||
"value": 5,
|
||||
"raw": "5",
|
||||
"digest": null
|
||||
"raw": "5"
|
||||
},
|
||||
{
|
||||
"type": "UnaryExpression",
|
||||
@ -223,24 +202,19 @@ expression: actual
|
||||
"start": 114,
|
||||
"end": 116,
|
||||
"value": 15,
|
||||
"raw": "15",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "15"
|
||||
}
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 119,
|
||||
"end": 120,
|
||||
"digest": null
|
||||
"end": 120
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -251,8 +225,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 129,
|
||||
"end": 136,
|
||||
"name": "extrude",
|
||||
"digest": null
|
||||
"name": "extrude"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -261,39 +234,22 @@ expression: actual
|
||||
"start": 137,
|
||||
"end": 139,
|
||||
"value": 10,
|
||||
"raw": "10",
|
||||
"digest": null
|
||||
"raw": "10"
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 141,
|
||||
"end": 142,
|
||||
"digest": null
|
||||
"end": 142
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 8,
|
||||
"name": "sg",
|
||||
"digest": null
|
||||
"name": "sg"
|
||||
},
|
||||
"init": {
|
||||
"type": "UnaryExpression",
|
||||
@ -34,22 +33,12 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 12,
|
||||
"end": 17,
|
||||
"name": "scale",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"name": "scale"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 0,
|
||||
"end": 6,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 9,
|
||||
"end": 11,
|
||||
"name": "to",
|
||||
"digest": null
|
||||
"name": "to"
|
||||
},
|
||||
"value": {
|
||||
"type": "ArrayExpression",
|
||||
@ -53,8 +51,7 @@ expression: actual
|
||||
"start": 14,
|
||||
"end": 15,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "UnaryExpression",
|
||||
@ -68,30 +65,17 @@ expression: actual
|
||||
"start": 18,
|
||||
"end": 19,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 13,
|
||||
"name": "myArray",
|
||||
"digest": null
|
||||
"name": "myArray"
|
||||
},
|
||||
"init": {
|
||||
"type": "ArrayRangeExpression",
|
||||
@ -34,8 +33,7 @@ expression: actual
|
||||
"start": 17,
|
||||
"end": 18,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
"endElement": {
|
||||
"type": "Literal",
|
||||
@ -43,23 +41,13 @@ expression: actual
|
||||
"start": 20,
|
||||
"end": 22,
|
||||
"value": 10,
|
||||
"raw": "10",
|
||||
"digest": null
|
||||
"raw": "10"
|
||||
},
|
||||
"endInclusive": true,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"endInclusive": true
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 8,
|
||||
"end": 24,
|
||||
"name": "firstPrimeNumber",
|
||||
"digest": null
|
||||
"name": "firstPrimeNumber"
|
||||
},
|
||||
"init": {
|
||||
"type": "FunctionExpression",
|
||||
@ -44,26 +43,15 @@ expression: actual
|
||||
"start": 50,
|
||||
"end": 51,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "fn",
|
||||
"digest": null
|
||||
"kind": "fn"
|
||||
},
|
||||
{
|
||||
"type": "ExpressionStatement",
|
||||
@ -79,20 +67,11 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 62,
|
||||
"end": 78,
|
||||
"name": "firstPrimeNumber",
|
||||
"digest": null
|
||||
"name": "firstPrimeNumber"
|
||||
},
|
||||
"arguments": [],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 3,
|
||||
"end": 8,
|
||||
"name": "thing",
|
||||
"digest": null
|
||||
"name": "thing"
|
||||
},
|
||||
"init": {
|
||||
"type": "FunctionExpression",
|
||||
@ -35,11 +34,9 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 12,
|
||||
"end": 17,
|
||||
"name": "param",
|
||||
"digest": null
|
||||
"name": "param"
|
||||
},
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
],
|
||||
"body": {
|
||||
@ -57,26 +54,15 @@ expression: actual
|
||||
"start": 39,
|
||||
"end": 43,
|
||||
"value": true,
|
||||
"raw": "true",
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"raw": "true"
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "fn",
|
||||
"digest": null
|
||||
"kind": "fn"
|
||||
},
|
||||
{
|
||||
"type": "ExpressionStatement",
|
||||
@ -92,8 +78,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 54,
|
||||
"end": 59,
|
||||
"name": "thing",
|
||||
"digest": null
|
||||
"name": "thing"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -102,20 +87,11 @@ expression: actual
|
||||
"start": 60,
|
||||
"end": 65,
|
||||
"value": false,
|
||||
"raw": "false",
|
||||
"digest": null
|
||||
"raw": "false"
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 14,
|
||||
"name": "mySketch",
|
||||
"digest": null
|
||||
"name": "mySketch"
|
||||
},
|
||||
"init": {
|
||||
"type": "PipeExpression",
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 17,
|
||||
"end": 30,
|
||||
"name": "startSketchAt",
|
||||
"digest": null
|
||||
"name": "startSketchAt"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -54,8 +52,7 @@ expression: actual
|
||||
"start": 32,
|
||||
"end": 33,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -63,15 +60,12 @@ expression: actual
|
||||
"start": 34,
|
||||
"end": 35,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -82,8 +76,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 49,
|
||||
"end": 55,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -98,8 +91,7 @@ expression: actual
|
||||
"start": 57,
|
||||
"end": 58,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -107,30 +99,25 @@ expression: actual
|
||||
"start": 60,
|
||||
"end": 61,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 64,
|
||||
"end": 65,
|
||||
"digest": null
|
||||
"end": 65
|
||||
},
|
||||
{
|
||||
"type": "TagDeclarator",
|
||||
"type": "TagDeclarator",
|
||||
"start": 67,
|
||||
"end": 74,
|
||||
"value": "myPath",
|
||||
"digest": null
|
||||
"value": "myPath"
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -141,8 +128,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 87,
|
||||
"end": 93,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -157,8 +143,7 @@ expression: actual
|
||||
"start": 95,
|
||||
"end": 96,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -166,22 +151,18 @@ expression: actual
|
||||
"start": 98,
|
||||
"end": 99,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 102,
|
||||
"end": 103,
|
||||
"digest": null
|
||||
"end": 103
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -192,8 +173,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 116,
|
||||
"end": 122,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -208,8 +188,7 @@ expression: actual
|
||||
"start": 124,
|
||||
"end": 125,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -217,30 +196,25 @@ expression: actual
|
||||
"start": 127,
|
||||
"end": 128,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 131,
|
||||
"end": 132,
|
||||
"digest": null
|
||||
"end": 132
|
||||
},
|
||||
{
|
||||
"type": "TagDeclarator",
|
||||
"type": "TagDeclarator",
|
||||
"start": 134,
|
||||
"end": 144,
|
||||
"value": "rightPath",
|
||||
"digest": null
|
||||
"value": "rightPath"
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -251,40 +225,23 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 157,
|
||||
"end": 162,
|
||||
"name": "close",
|
||||
"digest": null
|
||||
"name": "close"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 163,
|
||||
"end": 164,
|
||||
"digest": null
|
||||
"end": 164
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 14,
|
||||
"name": "mySketch",
|
||||
"digest": null
|
||||
"name": "mySketch"
|
||||
},
|
||||
"init": {
|
||||
"type": "PipeExpression",
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 17,
|
||||
"end": 30,
|
||||
"name": "startSketchAt",
|
||||
"digest": null
|
||||
"name": "startSketchAt"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -54,8 +52,7 @@ expression: actual
|
||||
"start": 32,
|
||||
"end": 33,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -63,15 +60,12 @@ expression: actual
|
||||
"start": 34,
|
||||
"end": 35,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -82,8 +76,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 41,
|
||||
"end": 47,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -98,8 +91,7 @@ expression: actual
|
||||
"start": 49,
|
||||
"end": 50,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -107,22 +99,18 @@ expression: actual
|
||||
"start": 52,
|
||||
"end": 53,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 56,
|
||||
"end": 57,
|
||||
"digest": null
|
||||
"end": 57
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -133,40 +121,23 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 62,
|
||||
"end": 67,
|
||||
"name": "close",
|
||||
"digest": null
|
||||
"name": "close"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 68,
|
||||
"end": 69,
|
||||
"digest": null
|
||||
"end": 69
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 11,
|
||||
"name": "myBox",
|
||||
"digest": null
|
||||
"name": "myBox"
|
||||
},
|
||||
"init": {
|
||||
"type": "CallExpression",
|
||||
@ -32,8 +31,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 14,
|
||||
"end": 27,
|
||||
"name": "startSketchAt",
|
||||
"digest": null
|
||||
"name": "startSketchAt"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -41,24 +39,14 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 28,
|
||||
"end": 29,
|
||||
"name": "p",
|
||||
"digest": null
|
||||
"name": "p"
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 11,
|
||||
"name": "myBox",
|
||||
"digest": null
|
||||
"name": "myBox"
|
||||
},
|
||||
"init": {
|
||||
"type": "PipeExpression",
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 14,
|
||||
"end": 15,
|
||||
"name": "f",
|
||||
"digest": null
|
||||
"name": "f"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -48,12 +46,10 @@ expression: actual
|
||||
"start": 16,
|
||||
"end": 17,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -64,8 +60,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 22,
|
||||
"end": 23,
|
||||
"name": "g",
|
||||
"digest": null
|
||||
"name": "g"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -74,39 +69,22 @@ expression: actual
|
||||
"start": 24,
|
||||
"end": 25,
|
||||
"value": 2,
|
||||
"raw": "2",
|
||||
"digest": null
|
||||
"raw": "2"
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 27,
|
||||
"end": 28,
|
||||
"digest": null
|
||||
"end": 28
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 6,
|
||||
"end": 11,
|
||||
"name": "myBox",
|
||||
"digest": null
|
||||
"name": "myBox"
|
||||
},
|
||||
"init": {
|
||||
"type": "PipeExpression",
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 14,
|
||||
"end": 27,
|
||||
"name": "startSketchAt",
|
||||
"digest": null
|
||||
"name": "startSketchAt"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -47,12 +45,10 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 28,
|
||||
"end": 29,
|
||||
"name": "p",
|
||||
"digest": null
|
||||
"name": "p"
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
},
|
||||
{
|
||||
"type": "CallExpression",
|
||||
@ -63,8 +59,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 34,
|
||||
"end": 38,
|
||||
"name": "line",
|
||||
"digest": null
|
||||
"name": "line"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -79,50 +74,31 @@ expression: actual
|
||||
"start": 40,
|
||||
"end": 41,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Identifier",
|
||||
"type": "Identifier",
|
||||
"start": 43,
|
||||
"end": 44,
|
||||
"name": "l",
|
||||
"digest": null
|
||||
"name": "l"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "PipeSubstitution",
|
||||
"type": "PipeSubstitution",
|
||||
"start": 47,
|
||||
"end": 48,
|
||||
"digest": null
|
||||
"end": 48
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"kind": "const",
|
||||
"digest": null
|
||||
"kind": "const"
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 0,
|
||||
"end": 6,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 9,
|
||||
"end": 11,
|
||||
"name": "to",
|
||||
"digest": null
|
||||
"name": "to"
|
||||
},
|
||||
"value": {
|
||||
"type": "ArrayExpression",
|
||||
@ -53,8 +51,7 @@ expression: actual
|
||||
"start": 14,
|
||||
"end": 15,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -62,28 +59,16 @@ expression: actual
|
||||
"start": 17,
|
||||
"end": 18,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|
@ -20,8 +20,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 0,
|
||||
"end": 6,
|
||||
"name": "lineTo",
|
||||
"digest": null
|
||||
"name": "lineTo"
|
||||
},
|
||||
"arguments": [
|
||||
{
|
||||
@ -38,8 +37,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 9,
|
||||
"end": 11,
|
||||
"name": "to",
|
||||
"digest": null
|
||||
"name": "to"
|
||||
},
|
||||
"value": {
|
||||
"type": "ArrayExpression",
|
||||
@ -53,8 +51,7 @@ expression: actual
|
||||
"start": 14,
|
||||
"end": 15,
|
||||
"value": 0,
|
||||
"raw": "0",
|
||||
"digest": null
|
||||
"raw": "0"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -62,13 +59,10 @@ expression: actual
|
||||
"start": 17,
|
||||
"end": 18,
|
||||
"value": 1,
|
||||
"raw": "1",
|
||||
"digest": null
|
||||
"raw": "1"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "ObjectProperty",
|
||||
@ -78,8 +72,7 @@ expression: actual
|
||||
"type": "Identifier",
|
||||
"start": 21,
|
||||
"end": 25,
|
||||
"name": "from",
|
||||
"digest": null
|
||||
"name": "from"
|
||||
},
|
||||
"value": {
|
||||
"type": "ArrayExpression",
|
||||
@ -93,8 +86,7 @@ expression: actual
|
||||
"start": 28,
|
||||
"end": 29,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
},
|
||||
{
|
||||
"type": "Literal",
|
||||
@ -102,28 +94,16 @@ expression: actual
|
||||
"start": 31,
|
||||
"end": 32,
|
||||
"value": 3,
|
||||
"raw": "3",
|
||||
"digest": null
|
||||
"raw": "3"
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
],
|
||||
"optional": false,
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
"optional": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"nonCodeMeta": {
|
||||
"nonCodeNodes": {},
|
||||
"start": [],
|
||||
"digest": null
|
||||
},
|
||||
"digest": null
|
||||
]
|
||||
}
|
||||
|