Compare commits

..

28 Commits

Author SHA1 Message Date
e8a82ea85e Clean up for PR 2025-04-11 09:47:45 -04:00
c1894edaed Remove sha1, add rfc3161TimeStampServer 2025-04-11 08:40:24 -04:00
8c28f34238 Back to publisherName = certificateSubjectName = KittyCAD Inc 2025-04-11 07:40:38 -04:00
d2340628a8 WIP 2025-04-11 07:05:22 -04:00
a1f5cdd690 Update signingHashAlgorithms to include sha1 2025-04-11 05:18:56 -04:00
d1d8d0a82c Use smctl windows certsync to update the windows store in build-apps (attempt) 2025-04-11 05:03:33 -04:00
f76b328136 WIP messing with Get-ChildItem -Recurse Cert 2025-04-11 04:59:45 -04:00
a13548da17 WIP messing with Get-ChildItem -Recurse Cert 2025-04-11 04:55:18 -04:00
65f4b0f239 WIP messing with Get-ChildItem -Recurse Cert 2025-04-11 04:54:05 -04:00
dbcc0bd3b4 WIP messing with Get-ChildItem -Recurse Cert 2025-04-11 04:43:35 -04:00
472b3618ac shell: pwsh 2025-04-11 04:00:47 -04:00
43e89e8bae Add back smksp_registrar.exe list and smctl.exe keypair ls 2025-04-10 19:39:56 -04:00
94a9e01301 Add certificateSha1 2025-04-10 19:02:01 -04:00
3980a1caf8 WIP 2025-04-10 18:58:46 -04:00
d4f23f8469 Trying to follow https://github.com/electron-userland/electron-builder/issues/7605#issuecomment-2257861622 2025-04-10 18:55:46 -04:00
9143c6f08a Test from https://github.com/electron-userland/electron-builder/issues/7605#issuecomment-2257861622 2025-04-10 18:49:23 -04:00
1d4456c458 Loosing my mind 2025-04-10 18:39:13 -04:00
c6fbb4fc63 Fix typo 2025-04-10 18:36:17 -04:00
b7c8d6c185 Copied line by line from docs in dummy script 2025-04-10 17:58:48 -04:00
f23aa5e642 Add dummy pierre-test-windows-code-sign, plus console log on normal script 2025-04-10 17:36:34 -04:00
8bb26c9b89 Remove stdio param for hopefully more logs 2025-04-10 16:56:54 -04:00
0d7aebdee9 DEBUG=electron-builder to get more logs (maybe) 2025-04-10 16:40:11 -04:00
ad333c2055 Try certutil.exe -csp "DigiCert Software Trust Manager KSP", forget yarn cache for now 2025-04-10 16:25:12 -04:00
3559df0c5e Add smksp_registrar.exe register which is somewhere else in their docs. Plus comment 2025-04-10 16:00:53 -04:00
e2dda07829 Update from docs, back to inherit 2025-04-10 15:33:19 -04:00
ea585cb5d6 Trying to get more logs 2025-04-10 15:04:18 -04:00
8af9af2aa7 Another try with the right things commented out 2025-04-10 14:22:30 -04:00
f0ba35c0b2 WIP: Updater on Nightly on Windows failed (Fixes #6256) 2025-04-10 14:09:55 -04:00
117 changed files with 2117 additions and 7954 deletions


@@ -10,7 +10,8 @@ on:
 - 'nightly-v[0-9]+.[0-9]+.[0-9]+'
 env:
-IS_RELEASE: ${{ github.ref_type == 'tag' && startsWith(github.ref_name, 'v') }}
+# IS_RELEASE: ${{ github.ref_type == 'tag' && startsWith(github.ref_name, 'v') }}
+IS_RELEASE: true
 IS_NIGHTLY: ${{ github.ref_type == 'tag' && startsWith(github.ref_name, 'nightly-v') }}
 concurrency:
@@ -99,11 +100,11 @@
 yarn files:set-version
 yarn files:flip-to-nightly
-- name: Set release version
-if: ${{ env.IS_RELEASE == 'true' }}
-run: |
-export VERSION=${GITHUB_REF_NAME#v}
-yarn files:set-version
+# - name: Set release version
+# if: ${{ env.IS_RELEASE == 'true' }}
+# run: |
+# export VERSION=${GITHUB_REF_NAME#v}
+# yarn files:set-version
 - uses: actions/upload-artifact@v4
 with:
@@ -183,37 +184,32 @@ jobs:
 max_attempts: 3
 command: yarn install
+# Next steps are from Digicert docs at
+# https://docs.digicert.com/en/digicert-keylocker/ci-cd-integrations/scripts/github/scripts-for-signing-using-ksp-library-on-github.html#ksp-signing-using-github-action-488726
 - name: Prepare certificate and variables (Windows only)
 if: ${{ (env.IS_RELEASE == 'true' || env.IS_NIGHTLY == 'true') && matrix.os == 'windows-2022' }}
 run: |
-echo "${{secrets.SM_CLIENT_CERT_FILE_B64 }}" | base64 --decode > /d/Certificate_pkcs12.p12
-cat /d/Certificate_pkcs12.p12
-echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
+CERTIFICATE_PATH=$RUNNER_TEMP/certificate.p12
+echo "$SM_CLIENT_CERT_FILE_B64" | base64 --decode > $CERTIFICATE_PATH
+echo "SM_CLIENT_CERT_FILE=$CERTIFICATE_PATH" >> "$GITHUB_ENV"
 echo "SM_HOST=${{ secrets.SM_HOST }}" >> "$GITHUB_ENV"
 echo "SM_API_KEY=${{ secrets.SM_API_KEY }}" >> "$GITHUB_ENV"
-echo "SM_CLIENT_CERT_FILE=D:\\Certificate_pkcs12.p12" >> "$GITHUB_ENV"
 echo "SM_CLIENT_CERT_PASSWORD=${{ secrets.SM_CLIENT_CERT_PASSWORD }}" >> "$GITHUB_ENV"
 echo "C:\Program Files (x86)\Windows Kits\10\App Certification Kit" >> $GITHUB_PATH
 echo "C:\Program Files (x86)\Microsoft SDKs\Windows\v10.0A\bin\NETFX 4.8 Tools" >> $GITHUB_PATH
-echo "C:\Program Files\DigiCert\DigiCert One Signing Manager Tools" >> $GITHUB_PATH
+echo "C:\Program Files\DigiCert\DigiCert Keylocker Tools" >> $GITHUB_PATH
 shell: bash
 - name: Setup certicate with SSM KSP (Windows only)
 if: ${{ (env.IS_RELEASE == 'true' || env.IS_NIGHTLY == 'true') && matrix.os == 'windows-2022' }}
 run: |
-curl -X GET https://one.digicert.com/signingmanager/api-ui/v1/releases/smtools-windows-x64.msi/download -H "x-api-key:%SM_API_KEY%" -o smtools-windows-x64.msi
-msiexec /i smtools-windows-x64.msi /quiet /qn
+curl -X GET https://one.digicert.com/signingmanager/api-ui/v1/releases/Keylockertools-windows-x64.msi/download -H "x-api-key:%SM_API_KEY%" -o Keylockertools-windows-x64.msi
+msiexec /i Keylockertools-windows-x64.msi /quiet /qn
 smksp_registrar.exe list
 smctl.exe keypair ls
 C:\Windows\System32\certutil.exe -csp "DigiCert Signing Manager KSP" -key -user
 smksp_cert_sync.exe
-smctl windows certsync
-# This last line `smctl windows certsync` was added after windows codesign failures started happening
-# with nightly-v25.4.10. It looks like `smksp_cert_sync.exe` used to do the sync to the local cert store,
-# but stopped doing it overnight. This extra call that I randomly got from this azure-related doc page
-# https://docs.digicert.com/en/digicert-keylocker/code-signing/sign-with-third-party-signing-tools/windows-applications/sign-azure-apps-with-signtool-using-ksp-library.html#sync-certificates--windows-only--618365
-# seems to be doing that extra sync that we need for scripts/sign-win.js to work.
-# TODO: we still need to make sign-win.js errors fail the workflow, see issue #6276
 shell: cmd
 - name: Build the app (debug)
@@ -232,8 +228,8 @@ jobs:
 CSC_LINK: ${{ secrets.APPLE_CERTIFICATE }}
 CSC_KEY_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
 CSC_KEYCHAIN: ${{ secrets.APPLE_SIGNING_IDENTITY }}
-WINDOWS_CERTIFICATE_THUMBPRINT: ${{ secrets.WINDOWS_CERTIFICATE_THUMBPRINT }}
-DEBUG: "electron-notarize*"
+# DEBUG: "electron-notarize*"
+DEBUG: electron-builder
 # TODO: Fix electron-notarize flakes. The logs above should help gather more data on failures
 uses: nick-fields/retry@v3.0.2
 with:
@@ -385,7 +381,7 @@ jobs:
 NOTES: ${{ needs.prepare-files.outputs.notes }}
 PUB_DATE: ${{ github.event.repository.updated_at }}
 WEBSITE_DIR: ${{ env.IS_NIGHTLY == 'true' && 'dl.zoo.dev/releases/modeling-app/nightly' || 'dl.zoo.dev/releases/modeling-app' }}
-URL_CODED_NAME: ${{ env.IS_NIGHTLY == 'true' && 'Zoo%20Design%20Studio%20%28Nightly%29' || 'Zoo%20Design%20Studio' }}
+URL_CODED_NAME: ${{ env.IS_NIGHTLY == 'true' && 'Zoo%20Modeling%20App%20%28Nightly%29' || 'Zoo%20Modeling%20App' }}
 run: |
 RELEASE_DIR=https://${WEBSITE_DIR}
 jq --null-input \
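Side note on the KeyLocker certificate steps above: a quick, hedged way to check that the certificate sync actually landed in the local Windows store before signing starts. This sketch is not taken from the PR; it only reuses tools the workflow already calls (smctl, certutil) plus the stock certutil store listing, and exact output will vary.

:: Sketch only, not part of this diff: sanity checks after smksp_cert_sync.exe / smctl windows certsync
:: List the keypairs DigiCert KeyLocker knows about (same call the workflow makes)
smctl.exe keypair ls
:: Show keys exposed through the DigiCert Signing Manager KSP (same call the workflow makes)
C:\Windows\System32\certutil.exe -csp "DigiCert Signing Manager KSP" -key -user
:: Confirm the synced signing certificate now shows up in the current user's personal store
certutil -user -store My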


@@ -231,11 +231,6 @@
 env:
 token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
 snapshottoken: ${{ secrets.KITTYCAD_API_TOKEN }}
-TAB_API_URL: ${{ secrets.TAB_API_URL }}
-TAB_API_KEY: ${{ secrets.TAB_API_KEY }}
-CI_COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
-CI_PR_NUMBER: ${{ github.event.pull_request.number }}
-TARGET: web
 - uses: actions/upload-artifact@v4
 if: ${{ needs.conditions.outputs.should-run == 'true' && !cancelled() && (success() || failure()) }}
@@ -370,11 +365,6 @@
 env:
 FAIL_ON_CONSOLE_ERRORS: true
 token: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
-TAB_API_URL: ${{ secrets.TAB_API_URL }}
-TAB_API_KEY: ${{ secrets.TAB_API_KEY }}
-CI_COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
-CI_PR_NUMBER: ${{ github.event.pull_request.number }}
-TARGET: desktop
 - uses: actions/upload-artifact@v4
 if: ${{ needs.conditions.outputs.should-run == 'true' && always() }}


@@ -1,55 +0,0 @@
-name: Test Nix Flake
-on:
-push:
-branches: [main]
-pull_request:
-workflow_dispatch:
-concurrency:
-group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-cancel-in-progress: true
-jobs:
-nix-flake-check:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v4
-with:
-submodules: recursive
-- uses: cachix/install-nix-action@v31
-with:
-nix_path: nixpkgs=channel:nixos-unstable
-- name: nix flake check for all platforms
-run: |
-nix flake check --all-systems
-nix-build-linux:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v4
-with:
-submodules: recursive
-- uses: cachix/install-nix-action@v31
-with:
-nix_path: nixpkgs=channel:nixos-unstable
-- name: nix build . for x86_64-linux
-run: nix build .
-nix-build-macos:
-runs-on: macos-latest
-steps:
-- uses: actions/checkout@v4
-with:
-submodules: recursive
-- uses: cachix/install-nix-action@v31
-with:
-nix_path: nixpkgs=channel:nixos-unstable
-- name: nix build . for x86_64-darwin
-run: nix build .

.gitignore (vendored): 1 line changed

@@ -26,7 +26,6 @@ yarn-error.log*
 .idea
 .vscode
 .helix
-result
 # rust
 rust/target

docs/kcl/import.md: new file, 80 lines

File diff suppressed because one or more lines are too long


@@ -77,7 +77,6 @@ layout: manual
 * [`helix`](kcl/std-helix)
 * [`hole`](kcl/hole)
 * [`hollow`](kcl/hollow)
-* [`intersect`](kcl/intersect)
 * [`lastSegX`](kcl/lastSegX)
 * [`lastSegY`](kcl/lastSegY)
 * [`legAngX`](kcl/legAngX)
@@ -124,7 +123,6 @@ layout: manual
 * [`sqrt`](kcl/sqrt)
 * [`startProfileAt`](kcl/startProfileAt)
 * [`startSketchOn`](kcl/startSketchOn)
-* [`subtract`](kcl/subtract)
 * [`sweep`](kcl/sweep)
 * [`tangentToEnd`](kcl/tangentToEnd)
 * [`tangentialArc`](kcl/tangentialArc)
@@ -133,7 +131,6 @@ layout: manual
 * [`toDegrees`](kcl/toDegrees)
 * [`toRadians`](kcl/toRadians)
 * [`translate`](kcl/translate)
-* [`union`](kcl/union)
 * [`xLine`](kcl/xLine)
 * [`yLine`](kcl/yLine)
 * **std::math**

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long


@@ -252,7 +252,7 @@ Data for an imported geometry.
 | Property | Type | Description | Required |
 |----------|------|-------------|----------|
-| `type` |enum: `ImportedGeometry`| | No |
+| `type` |enum: [`ImportedGeometry`](/docs/kcl/types/ImportedGeometry)| | No |
 | `id` |[`string`](/docs/kcl/types/string)| The ID of the imported geometry. | No |
 | `value` |`[` [`string`](/docs/kcl/types/string) `]`| The original file paths. | No |

File diff suppressed because one or more lines are too long


@@ -1144,7 +1144,7 @@ sketch001 = startSketchOn(XZ)
 )
 test(
-`Can import a local OBJ file`,
+`Can use the import stdlib function on a local OBJ file`,
 { tag: '@electron' },
 async ({ page, context }, testInfo) => {
 test.fixme(orRunWhenFullSuiteEnabled())
@@ -1194,7 +1194,7 @@ sketch001 = startSketchOn(XZ)
 .toBeLessThan(15)
 })
 await test.step(`Write the import function line`, async () => {
-await u.codeLocator.fill(`import 'cube.obj'\ncube`)
+await u.codeLocator.fill(`import('cube.obj')`)
 await page.waitForTimeout(800)
 })
 await test.step(`Reset the camera before checking`, async () => {


@@ -99,6 +99,7 @@ export class HomePageFixture {
 createAndGoToProject = async (projectTitle = 'untitled') => {
 await this.projectsLoaded()
 await this.projectButtonNew.click()
+await this.projectTextName.click()
 await this.projectTextName.fill(projectTitle)
 await this.projectButtonContinue.click()
 }


@@ -233,7 +233,7 @@ export class SceneFixture {
 settled = async (cmdBar: CmdBarFixture) => {
 const u = await getUtils(this.page)
-await expect(this.startEditSketchBtn).not.toBeDisabled({ timeout: 15_000 })
+await expect(this.startEditSketchBtn).not.toBeDisabled()
 await expect(this.startEditSketchBtn).toBeVisible()
 await cmdBar.openCmdBar()


@@ -1,63 +0,0 @@
-import type { Reporter, TestCase, TestResult } from '@playwright/test/reporter'
-class MyAPIReporter implements Reporter {
-onTestEnd(test: TestCase, result: TestResult): void {
-if (!process.env.TAB_API_URL || !process.env.TAB_API_KEY) {
-return
-}
-const payload = {
-// Required information
-project: 'https://github.com/KittyCAD/modeling-app',
-branch: process.env.GITHUB_HEAD_REF || process.env.GITHUB_REF_NAME || '',
-commit: process.env.CI_COMMIT_SHA || process.env.GITHUB_SHA || '',
-test: test.titlePath().slice(2).join(' '),
-status: result.status,
-// Optional information
-duration: result.duration / 1000,
-message: result.error?.stack,
-target: process.env.TARGET || null,
-platform: process.env.RUNNER_OS || process.platform,
-// Extra test and result data
-annotations: test.annotations.map((a) => a.type),
-retries: result.retry,
-// Extra environment variables
-CI_COMMIT_SHA: process.env.CI_COMMIT_SHA || null,
-CI_PR_NUMBER: process.env.CI_PR_NUMBER || null,
-GITHUB_BASE_REF: process.env.GITHUB_BASE_REF || null,
-GITHUB_EVENT_NAME: process.env.GITHUB_EVENT_NAME || null,
-GITHUB_HEAD_REF: process.env.GITHUB_HEAD_REF || null,
-GITHUB_REF_NAME: process.env.GITHUB_REF_NAME || null,
-GITHUB_REF: process.env.GITHUB_REF || null,
-GITHUB_SHA: process.env.GITHUB_SHA || null,
-GITHUB_WORKFLOW: process.env.GITHUB_WORKFLOW || null,
-RUNNER_ARCH: process.env.RUNNER_ARCH || null,
-}
-void (async () => {
-try {
-const response = await fetch(`${process.env.TAB_API_URL}/api/results`, {
-method: 'POST',
-headers: new Headers({
-'Content-Type': 'application/json',
-'X-API-Key': process.env.TAB_API_KEY || '',
-}),
-body: JSON.stringify(payload),
-})
-if (!response.ok && !process.env.CI) {
-console.error(
-'TAB API - Failed to send test result:',
-await response.text()
-)
-}
-} catch {
-if (!process.env.CI) {
-console.error('TAB API - Unable to send test result')
-}
-}
-})()
-}
-}
-export default MyAPIReporter


@@ -6,9 +6,7 @@ import type { NamedView } from '@rust/kcl-lib/bindings/NamedView'
 import {
 createProject,
-orRunWhenFullSuiteEnabled,
-perProjectSettingsToToml,
-runningOnMac,
+perProjectsettingsToToml,
 tomlToPerProjectSettings,
 } from '@e2e/playwright/test-utils'
 import { expect, test } from '@e2e/playwright/zoo-test'
@@ -59,13 +57,11 @@ function tomlStringOverWriteNamedViewUuids(toml: string): string {
 settings.settings.app.named_views = remappedNamedViews
 }
 }
-return perProjectSettingsToToml(settings)
+return perProjectsettingsToToml(settings)
 }
 test.describe('Named view tests', () => {
-if (runningOnMac()) {
-test.fixme(orRunWhenFullSuiteEnabled())
-}
+test.skip() // TODO: Jace is working on these
 test('Verify project.toml is not created', async ({ page }, testInfo) => {
 // Create project and load it
 const projectName = 'named-views'
@@ -109,9 +105,6 @@ test.describe('Named view tests', () => {
 PROJECT_SETTINGS_FILE_NAME
 )
-const toastMessage = page.getByText('Named view uuid1 created.')
-await expect(toastMessage).toBeInViewport()
 // Expect project.toml to be generated on disk since a named view was created
 await expect(async () => {
 let exists = await fileExists(tempProjectSettingsFilePath)
@@ -137,6 +130,7 @@
 }, testInfo) => {
 const projectName = 'named-views'
 const myNamedView1 = 'uuid1'
+const myNamedView2 = 'uuid2'
 // Create project and go into the project
 await createProject({ name: projectName, page })
@@ -148,9 +142,6 @@
 await cmdBar.argumentInput.fill(myNamedView1)
 await cmdBar.progressCmdBar(false)
-let toastMessage = page.getByText('Named view uuid1 created.')
-await expect(toastMessage).toBeInViewport()
 // Generate file paths for project.toml
 const projectDirName = testInfo.outputPath('electron-test-projects-dir')
 const tempProjectSettingsFilePath = join(
@@ -179,20 +170,17 @@
 // Delete a named view
 await cmdBar.openCmdBar()
 await cmdBar.chooseCommand('delete named view')
-cmdBar.selectOption({ name: myNamedView1 })
+cmdBar.selectOption({ name: myNamedView2 })
 await cmdBar.progressCmdBar(false)
-toastMessage = page.getByText('Named view uuid1 removed.')
-await expect(toastMessage).toBeInViewport()
 await expect(async () => {
 // Read project.toml into memory again since we deleted a named view
 let tomlString = await fsp.readFile(tempProjectSettingsFilePath, 'utf-8')
 // Rewrite the uuids in the named views to match snapshot otherwise they will be randomly generated from rust and break
 tomlString = tomlStringOverWriteNamedViewUuids(tomlString)
-// Write the entire tomlString to a snapshot.
-// There are many key/value pairs to check this is a safer match.
+// // Write the entire tomlString to a snapshot.
+// // There are many key/value pairs to check this is a safer match.
 expect(tomlString).toMatchSnapshot('verify-named-view-gets-deleted')
 }).toPass()
 })
@@ -214,9 +202,6 @@
 await cmdBar.argumentInput.fill(myNamedView)
 await cmdBar.progressCmdBar(false)
-let toastMessage = page.getByText('Named view uuid1 created.')
-await expect(toastMessage).toBeInViewport()
 // Generate file paths for project.toml
 const projectDirName = testInfo.outputPath('electron-test-projects-dir')
 const tempProjectSettingsFilePath = join(
@@ -273,19 +258,26 @@
 await cmdBar.argumentInput.fill(myNamedView1)
 await cmdBar.progressCmdBar(false)
-let toastMessage = page.getByText('Named view uuid1 created.')
-await expect(toastMessage).toBeInViewport()
-await scene.moveCameraTo({ x: 608, y: 0, z: 0 }, { x: 0, y: 0, z: 0 })
-await page.waitForTimeout(2500)
+await page.waitForTimeout(1000)
+const orbitMouseStart = { x: 800, y: 130 }
+const orbitMouseEnd = { x: 0, y: 130 }
+await page.mouse.move(orbitMouseStart.x, orbitMouseStart.y)
+await page.mouse.down({ button: 'middle' })
+await page.mouse.move(orbitMouseEnd.x, orbitMouseEnd.y, {
+steps: 3,
+})
+await page.mouse.up({ button: 'middle' })
+await page.waitForTimeout(1000)
 await cmdBar.openCmdBar()
 await cmdBar.chooseCommand('create named view')
 await cmdBar.argumentInput.fill(myNamedView2)
 await cmdBar.progressCmdBar(false)
-toastMessage = page.getByText('Named view uuid2 created.')
-await expect(toastMessage).toBeInViewport()
+// Wait a moment for the project.toml to get written to disk with the new view point
+await page.waitForTimeout(1000)
 // Generate paths for the project.toml
 const tempProjectSettingsFilePath = join(

@@ -1,5 +1,16 @@
 [settings]
-app = { }
 modeling = { }
 text_editor = { }
 command_bar = { }
+[settings.app.named_views.0656fb1a-9640-473e-b334-591dc70c0138]
+name = "uuid1"
+eye_offset = 1_378.0059
+fov_y = 45
+is_ortho = false
+ortho_scale_enabled = true
+ortho_scale_factor = 1.6
+pivot_position = [ 0, 0, 0 ]
+pivot_rotation = [ 0.5380994, 0.0, 0.0, 0.8428814 ]
+world_coord_system = "right_handed_up_z"
+version = 1


@@ -17,12 +17,12 @@ version = 1
 [settings.app.named_views.c810cf04-c6cc-4a4a-8b11-17bf445dcab7]
 name = "uuid2"
-eye_offset = 608
+eye_offset = 1_378.0059
 fov_y = 45
 is_ortho = false
 ortho_scale_enabled = true
 ortho_scale_factor = 1.6
-pivot_position = [ 0, 0, 0 ]
-pivot_rotation = [ 0.5, 0.5, 0.5, 0.5 ]
+pivot_position = [ 1_826.5239, 0.0, 0.0 ]
+pivot_rotation = [ 0.5380994, 0.0, 0.0, 0.8428814 ]
 world_coord_system = "right_handed_up_z"
 version = 1


@@ -68,10 +68,12 @@ test.describe('edit with AI example snapshots', () => {
 body1CapCoords.x,
 body1CapCoords.y
 )
+const yellow: [number, number, number] = [179, 179, 131]
 const submittingToast = page.getByText('Submitting to Text-to-CAD API...')
 await test.step('wait for scene to load select body and check selection came through', async () => {
 await clickBody1Cap()
+await scene.expectPixelColor(yellow, body1CapCoords, 20)
 await editor.expectState({
 highlightedCode: '',
 activeLines: ['|>startProfileAt([-73.64,-42.89],%)'],


@@ -588,7 +588,6 @@ test(
 'Draft circle should look right',
 { tag: '@snapshot' },
 async ({ page, context, cmdBar, scene }) => {
-test.fixme(orRunWhenFullSuiteEnabled())
 const u = await getUtils(page)
 await page.setViewportSize({ width: 1200, height: 500 })
 const PUR = 400 / 37.5 //pixeltoUnitRatio


@@ -1,15 +0,0 @@
-import { createProject } from '@e2e/playwright/test-utils'
-import { test } from '@e2e/playwright/zoo-test'
-test.describe('Stress test', () => {
-test('Create project and load stress test', async ({
-cmdBar,
-scene,
-page,
-}, testInfo) => {
-const projectName = 'stress-test-project'
-// Create and load project
-await createProject({ name: projectName, page })
-await scene.settled(cmdBar)
-})
-})


@@ -1140,7 +1140,7 @@ export function tomlToPerProjectSettings(
 return TOML.parse(toml)
 }
-export function perProjectSettingsToToml(
+export function perProjectsettingsToToml(
 settings: DeepPartial<ProjectConfiguration>
 ) {
 // eslint-disable-next-line no-restricted-syntax


@@ -33,10 +33,12 @@ win:
 - x64
 - arm64
 signtoolOptions:
-sign: "./scripts/sign-win.js"
+certificateSha1: F4C9A52FF7BC26EE5E054946F6B11DEEA94C748D
 signingHashAlgorithms:
 - sha256
-publisherName: "KittyCAD Inc" # needs to be exactly like on Digicert
+publisherName: "KittyCAD Inc"
+certificateSubjectName: "KittyCAD Inc"
+rfc3161TimeStampServer: http://timestamp.digicert.com
 icon: "assets/icon.ico"
 fileAssociations:
 - ext: kcl
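With the added signtoolOptions above, electron-builder picks the signing certificate out of the Windows certificate store (where the KeyLocker steps in the build workflow sync it) instead of calling scripts/sign-win.js, and timestamps against the configured RFC 3161 server. As a hedged sketch only, a roughly equivalent manual call is shown below; the thumbprint and timestamp URL come from the config above, while the /fd and /td digests and the placeholder installer path are assumptions rather than what electron-builder is documented here to run verbatim.

:: Sketch only, not from this PR: approximate manual equivalent of the signtoolOptions above
:: /sha1 selects the certificate by thumbprint (certificateSha1), /tr points at rfc3161TimeStampServer
signtool sign /sha1 F4C9A52FF7BC26EE5E054946F6B11DEEA94C748D /fd sha256 /tr http://timestamp.digicert.com /td sha256 "path\to\installer.exe"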

flake.lock (generated): 77 lines changed

@@ -1,56 +1,6 @@
 {
 "nodes": {
-"naersk": {
-"inputs": {
-"nixpkgs": "nixpkgs"
-},
-"locked": {
-"lastModified": 1743800763,
-"narHash": "sha256-YFKV+fxEpMgP5VsUcM6Il28lI0NlpM7+oB1XxbBAYCw=",
-"owner": "nix-community",
-"repo": "naersk",
-"rev": "ed0232117731a4c19d3ee93aa0c382a8fe754b01",
-"type": "github"
-},
-"original": {
-"owner": "nix-community",
-"repo": "naersk",
-"type": "github"
-}
-},
 "nixpkgs": {
-"locked": {
-"lastModified": 1744157173,
-"narHash": "sha256-bWSjxDwq7iVePrhmA7tY2dyMWHuNJo8knkO4y+q4ZkY=",
-"owner": "NixOS",
-"repo": "nixpkgs",
-"rev": "6a39c6e495eefabc935d8ddf66aa45d85b85fa3f",
-"type": "github"
-},
-"original": {
-"owner": "NixOS",
-"ref": "nixpkgs-unstable",
-"repo": "nixpkgs",
-"type": "github"
-}
-},
-"nixpkgs_2": {
-"locked": {
-"lastModified": 1744157173,
-"narHash": "sha256-bWSjxDwq7iVePrhmA7tY2dyMWHuNJo8knkO4y+q4ZkY=",
-"owner": "NixOS",
-"repo": "nixpkgs",
-"rev": "6a39c6e495eefabc935d8ddf66aa45d85b85fa3f",
-"type": "github"
-},
-"original": {
-"owner": "NixOS",
-"ref": "nixpkgs-unstable",
-"repo": "nixpkgs",
-"type": "github"
-}
-},
-"nixpkgs_3": {
 "locked": {
 "lastModified": 1736320768,
 "narHash": "sha256-nIYdTAiKIGnFNugbomgBJR+Xv5F1ZQU+HfaBqJKroC0=",
@@ -66,23 +16,38 @@
 "type": "github"
 }
 },
+"nixpkgs_2": {
+"locked": {
+"lastModified": 1728538411,
+"narHash": "sha256-f0SBJz1eZ2yOuKUr5CA9BHULGXVSn6miBuUWdTyhUhU=",
+"owner": "NixOS",
+"repo": "nixpkgs",
+"rev": "b69de56fac8c2b6f8fd27f2eca01dcda8e0a4221",
+"type": "github"
+},
+"original": {
+"owner": "NixOS",
+"ref": "nixpkgs-unstable",
+"repo": "nixpkgs",
+"type": "github"
+}
+},
 "root": {
 "inputs": {
-"naersk": "naersk",
-"nixpkgs": "nixpkgs_2",
+"nixpkgs": "nixpkgs",
 "rust-overlay": "rust-overlay"
 }
 },
 "rust-overlay": {
 "inputs": {
-"nixpkgs": "nixpkgs_3"
+"nixpkgs": "nixpkgs_2"
 },
 "locked": {
-"lastModified": 1744338850,
-"narHash": "sha256-pwMIVmsb8fjjT92n5XFDqCsplcX70qVMMT7NulumPXs=",
+"lastModified": 1736476219,
+"narHash": "sha256-+qyv3QqdZCdZ3cSO/cbpEY6tntyYjfe1bB12mdpNFaY=",
 "owner": "oxalica",
 "repo": "rust-overlay",
-"rev": "5e64aecc018e6f775572609e7d7485fdba6985a7",
+"rev": "de30cc5963da22e9742bbbbb9a3344570ed237b9",
 "type": "github"
 },
 "original": {


@@ -1,62 +1,72 @@
 {
-description = "zoo.dev modeling-app";
+description = "modeling-app development environment";
+# Flake inputs
 inputs = {
 nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
-rust-overlay.url = "github:oxalica/rust-overlay";
-naersk.url = "github:nix-community/naersk";
+rust-overlay.url = "github:oxalica/rust-overlay"; # A helper for Rust + Nix
 };
-outputs = {
-self,
-nixpkgs,
-rust-overlay,
-naersk,
-}: let
+# Flake outputs
+outputs = { self, nixpkgs, rust-overlay }:
+let
+# Overlays enable you to customize the Nixpkgs attribute set
 overlays = [
+# Makes a `rust-bin` attribute available in Nixpkgs
 (import rust-overlay)
+# Provides a `rustToolchain` attribute for Nixpkgs that we can use to
+# create a Rust environment
 (self: super: {
 rustToolchain = super. rust-bin.stable.latest.default.override {
 targets = [ "wasm32-unknown-unknown" ];
 extensions = [ "rustfmt" "llvm-tools-preview" "rust-src" ];
 };
 })
-];
-allSystems = [
-"x86_64-linux"
-"aarch64-linux"
-"x86_64-darwin"
-"aarch64-darwin"
-];
-forAllSystems = f:
-nixpkgs.lib.genAttrs allSystems (system:
-f {
-pkgs = import nixpkgs {
-inherit overlays system;
-};
-system = system;
+(self: super: {
+cargo-llvm-cov = super.cargo-llvm-cov.overrideAttrs(oa: {
+doCheck = false; doInstallCheck = false;
 });
-in {
-devShells = forAllSystems ({pkgs, ...}: {
+})
+];
+# Systems supported
+allSystems = [
+"x86_64-linux" # 64-bit Intel/AMD Linux
+"aarch64-linux" # 64-bit ARM Linux
+"x86_64-darwin" # 64-bit Intel macOS
+"aarch64-darwin" # 64-bit ARM macOS
+];
+# Helper to provide system-specific attributes
+forAllSystems = f: nixpkgs.lib.genAttrs allSystems (system: f {
+pkgs = import nixpkgs { inherit overlays system; config.allowBroken = true; };
+});
+in
+{
+# Development environment output
+devShells = forAllSystems ({ pkgs }: {
 default = pkgs.mkShell {
-packages =
-(with pkgs; [
+# The Nix packages provided in the environment
+packages = (with pkgs; [
+# The package provided by our custom overlay. Includes cargo, Clippy, cargo-fmt,
+# rustdoc, rustfmt, and other tools.
 rustToolchain
+cargo-llvm-cov
 cargo-nextest
 just
 postgresql.lib
 openssl
 pkg-config
 nodejs_22
 yarn
 electron
 playwright-driver.browsers
-wasm-pack
-python3Full
-])
-++ pkgs.lib.optionals pkgs.stdenv.isDarwin (with pkgs; [
+]) ++ pkgs.lib.optionals pkgs.stdenv.isDarwin (with pkgs; [
 libiconv
 darwin.apple_sdk.frameworks.Security
 ]);
@@ -70,27 +80,5 @@
 NODE_ENV = "development";
 };
 });
-packages = forAllSystems ({
-pkgs,
-system,
-}: let
-naersk-lib = pkgs.callPackage naersk {
-cargo = pkgs.rustToolchain;
-rustc = pkgs.rustToolchain;
-};
-in {
-kcl-language-server = naersk-lib.buildPackage {
-pname = "kcl-language-server";
-version = "0.1.0";
-release = true;
-src = ./rust;
-cargoBuildOptions = opt: opt ++ ["-p" "kcl-language-server"];
-buildInputs = [pkgs.openssl pkgs.pkg-config];
-};
-default = self.packages.${system}.kcl-language-server;
-});
 };
 }


@@ -234,7 +234,7 @@
 "ts-node": "^10.0.0",
 "typescript": "^5.8.3",
 "typescript-eslint": "^8.29.0",
-"vite": "^5.4.18",
+"vite": "^5.4.17",
 "vite-plugin-package-version": "^1.1.0",
 "vite-plugin-top-level-await": "^1.5.0",
 "vite-tsconfig-paths": "^4.3.2",


@@ -834,9 +834,9 @@ vite-tsconfig-paths@^5.1.4:
 tsconfck "^3.0.3"
 "vite@^5.0.0 || ^6.0.0":
-version "6.2.6"
+version "6.2.5"
-resolved "https://registry.yarnpkg.com/vite/-/vite-6.2.6.tgz#7f0ccf2fdc0c1eda079ce258508728e2473d3f61"
+resolved "https://registry.yarnpkg.com/vite/-/vite-6.2.5.tgz#d093b5fe8eb96e594761584a966ab13f24457820"
-integrity sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==
+integrity sha512-j023J/hCAa4pRIUH6J9HemwYfjB5llR2Ps0CWeikOtdR8+pAURAk0DoJC5/mm9kd+UgdnIy7d6HE4EAvlYhPhA==
 dependencies:
 esbuild "^0.25.0"
 postcss "^8.5.3"


@@ -389,13 +389,6 @@ export class LanguageServerPlugin implements PluginValue {
 }
 if (insertText && insertTextFormat === 2) {
-// We end with ${} so you can jump to the end of the snippet.
-// After the last argument.
-// This is not standard from the lsp so we add it here.
-if (insertText.endsWith(')')) {
-// We have a function its safe to insert the ${} at the end.
-insertText = insertText + '${}'
-}
 return snippetCompletion(insertText, completion)
 }


@@ -45,7 +45,6 @@ export default defineConfig({
 [process.env.CI ? 'dot' : 'list'],
 ['json', { outputFile: './test-results/report.json' }],
 ['html'],
-['./e2e/playwright/lib/api-reporter.ts'],
 ],
 /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
 use: {


@@ -45,7 +45,6 @@ export default defineConfig({
 ['dot'],
 ['json', { outputFile: './test-results/report.json' }],
 ['html'],
-['./e2e/playwright/lib/api-reporter.ts'],
 ],
 /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
 use: {

Binary file not shown (snapshot image changed: 57 KiB before, 58 KiB after)

rust/Cargo.lock (generated): 48 lines changed

@@ -1780,7 +1780,7 @@ dependencies = [
 [[package]]
 name = "kcl-bumper"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "anyhow",
 "clap",
@@ -1791,7 +1791,7 @@ dependencies = [
 [[package]]
 name = "kcl-derive-docs"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "Inflector",
 "anyhow",
@@ -1810,7 +1810,7 @@ dependencies = [
 [[package]]
 name = "kcl-directory-test-macro"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -1819,7 +1819,7 @@ dependencies = [
 [[package]]
 name = "kcl-language-server"
-version = "0.2.60"
+version = "0.2.58"
 dependencies = [
 "anyhow",
 "clap",
@@ -1840,7 +1840,7 @@ dependencies = [
 [[package]]
 name = "kcl-language-server-release"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "anyhow",
 "clap",
@@ -1860,7 +1860,7 @@ dependencies = [
 [[package]]
 name = "kcl-lib"
-version = "0.2.60"
+version = "0.2.58"
 dependencies = [
 "anyhow",
 "approx 0.5.1",
@@ -1928,7 +1928,7 @@ dependencies = [
 [[package]]
 name = "kcl-python-bindings"
-version = "0.3.60"
+version = "0.3.58"
 dependencies = [
 "anyhow",
 "kcl-lib",
@@ -1943,7 +1943,7 @@ dependencies = [
 [[package]]
 name = "kcl-test-server"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "anyhow",
 "hyper 0.14.32",
@@ -1956,7 +1956,7 @@ dependencies = [
 [[package]]
 name = "kcl-to-core"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1970,7 +1970,7 @@ dependencies = [
 [[package]]
 name = "kcl-wasm-lib"
-version = "0.1.60"
+version = "0.1.58"
 dependencies = [
 "bson",
 "console_error_panic_hook",
@@ -2033,9 +2033,9 @@ dependencies = [
 [[package]]
 name = "kittycad-modeling-cmds"
-version = "0.2.113"
+version = "0.2.110"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa1c927569925425a1b03711617c384a30cb7554394e8a6a01266910b22421de"
+checksum = "bdfd16800a12a2eaefff53958bd871875c246e669274269f7caefc25d19641ad"
 dependencies = [
 "anyhow",
 "chrono",
@@ -2762,9 +2762,9 @@ dependencies = [
 [[package]]
 name = "pyo3"
-version = "0.24.1"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17da310086b068fbdcefbba30aeb3721d5bb9af8db4987d6735b2183ca567229"
+checksum = "7f1c6c3591120564d64db2261bec5f910ae454f01def849b9c22835a84695e86"
 dependencies = [
 "cfg-if",
 "indoc",
@@ -2781,9 +2781,9 @@ dependencies = [
 [[package]]
 name = "pyo3-build-config"
-version = "0.24.1"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e27165889bd793000a098bb966adc4300c312497ea25cf7a690a9f0ac5aa5fc1"
+checksum = "e9b6c2b34cf71427ea37c7001aefbaeb85886a074795e35f161f5aecc7620a7a"
 dependencies = [
 "once_cell",
 "target-lexicon",
@@ -2791,9 +2791,9 @@ dependencies = [
 [[package]]
 name = "pyo3-ffi"
-version = "0.24.1"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05280526e1dbf6b420062f3ef228b78c0c54ba94e157f5cb724a609d0f2faabc"
+checksum = "5507651906a46432cdda02cd02dd0319f6064f1374c9147c45b978621d2c3a9c"
 dependencies = [
 "libc",
 "pyo3-build-config",
@@ -2801,9 +2801,9 @@ dependencies = [
 [[package]]
 name = "pyo3-macros"
-version = "0.24.1"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c3ce5686aa4d3f63359a5100c62a127c9f15e8398e5fdeb5deef1fed5cd5f44"
+checksum = "b0d394b5b4fd8d97d48336bb0dd2aebabad39f1d294edd6bcd2cccf2eefe6f42"
 dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
@@ -2813,9 +2813,9 @@ dependencies = [
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.24.1"
+version = "0.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4cf6faa0cbfb0ed08e89beb8103ae9724eb4750e3a78084ba4017cbe94f3855"
+checksum = "fd72da09cfa943b1080f621f024d2ef7e2773df7badd51aa30a2be1f8caa7c8e"
 dependencies = [
 "heck",
 "proc-macro2",
@@ -3956,9 +3956,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 [[package]]
 name = "tokio"
-version = "1.44.2"
+version = "1.44.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
+checksum = "f382da615b842244d4b8738c82ed1275e6c5dd90c459a30941cd07080b06c91a"
 dependencies = [
 "backtrace",
 "bytes",


@@ -36,10 +36,10 @@ dashmap = { version = "6.1.0" }
 http = "1"
 indexmap = "2.7.0"
 kittycad = { version = "0.3.36", default-features = false, features = ["js", "requests"] }
-kittycad-modeling-cmds = { version = "0.2.113", features = ["ts-rs", "websocket"] }
+kittycad-modeling-cmds = { version = "0.2.110", features = ["ts-rs", "websocket"] }
 lazy_static = "1.5.0"
 miette = "7.5.0"
-pyo3 = { version = "0.24.1" }
+pyo3 = { version = "0.24.0" }
 serde = { version = "1", features = ["derive"] }
 serde_json = { version = "1" }
 slog = "2.7.0"


@@ -1,7 +1,7 @@
 [package]
 name = "kcl-bumper"
-version = "0.1.60"
+version = "0.1.58"
 edition = "2021"
 repository = "https://github.com/KittyCAD/modeling-api"
 rust-version = "1.76"


@@ -1,7 +1,7 @@
 [package]
 name = "kcl-derive-docs"
 description = "A tool for generating documentation from Rust derive macros"
-version = "0.1.60"
+version = "0.1.58"
 edition = "2021"
 license = "MIT"
 repository = "https://github.com/KittyCAD/modeling-app"


@@ -1,7 +1,7 @@
 [package]
 name = "kcl-directory-test-macro"
 description = "A tool for generating tests from a directory of kcl files"
-version = "0.1.60"
+version = "0.1.58"
 edition = "2021"
 license = "MIT"
 repository = "https://github.com/KittyCAD/modeling-app"


@@ -1,6 +1,6 @@
 [package]
 name = "kcl-language-server-release"
-version = "0.1.60"
+version = "0.1.58"
 edition = "2021"
 authors = ["KittyCAD Inc <kcl@kittycad.io>"]
 publish = false


@@ -2,7 +2,7 @@
 name = "kcl-language-server"
 description = "A language server for KCL."
 authors = ["KittyCAD Inc <kcl@kittycad.io>"]
-version = "0.2.60"
+version = "0.2.58"
 edition = "2021"
 license = "MIT"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -32,7 +32,7 @@ tracing-subscriber = { workspace = true }
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 signal-hook = "0.3.17"
-tokio = { version = "1.44.2", features = ["full"] }
+tokio = { version = "1.43.0", features = ["full"] }
 tower-lsp = { version = "0.20.0", features = ["proposed"] }
 [target.'cfg(target_arch = "wasm32")'.dependencies]


@@ -9,10 +9,7 @@ export async function createClient(
 serverOptions: lc.ServerOptions
 ): Promise<lc.LanguageClient> {
 const clientOptions: lc.LanguageClientOptions = {
-documentSelector: [
-{ scheme: 'file', language: 'kcl' },
-{ scheme: 'untitled', language: 'kcl' },
-],
+documentSelector: [{ scheme: 'file', language: 'kcl' }],
 initializationOptions,
 traceOutputChannel,
 outputChannel,


@@ -1,7 +1,7 @@
 [package]
 name = "kcl-lib"
 description = "KittyCAD Language implementation and tools"
-version = "0.2.60"
+version = "0.2.58"
 edition = "2021"
 license = "MIT"
 repository = "https://github.com/KittyCAD/modeling-app"
@@ -103,7 +103,7 @@ tokio-tungstenite = { version = "0.24.0", features = [
 tower-lsp = { workspace = true, features = ["proposed", "default"] }
 [features]
-default = ["cli", "engine"]
+default = ["engine"]
 cli = ["dep:clap", "kittycad/clap"]
 dhat-heap = ["dep:dhat"]
 # For the lsp server, when run with stdout for rpc we want to disable println.
@@ -126,7 +126,7 @@ insta = { version = "1.41.1", features = ["json", "filters", "redactions"] }
 kcl-directory-test-macro = { version = "0.1", path = "../kcl-directory-test-macro" }
 miette = { version = "7.5.0", features = ["fancy"] }
 pretty_assertions = "1.4.1"
-tokio = { version = "1.44.2", features = ["rt-multi-thread", "macros", "time"] }
+tokio = { version = "1.41.1", features = ["rt-multi-thread", "macros", "time"] }
 twenty-twenty = "0.8.0"
 [lints]


@@ -444,11 +444,12 @@ impl FnData {
 }
 }
+#[allow(clippy::literal_string_with_formatting_args)]
 pub(super) fn to_autocomplete_snippet(&self) -> String {
 if self.name == "loft" {
-return "loft([${0:sketch000}, ${1:sketch001}])".to_owned();
+return "loft([${0:sketch000}, ${1:sketch001}])${}".to_owned();
 } else if self.name == "hole" {
-return "hole(${0:holeSketch}, ${1:%})".to_owned();
+return "hole(${0:holeSketch}, ${1:%})${}".to_owned();
 }
 let mut args = Vec::new();
 let mut index = 0;
@@ -458,7 +459,9 @@ impl FnData {
 args.push(arg_str);
 }
 }
-format!("{}({})", self.preferred_name, args.join(", "))
+// We end with ${} so you can jump to the end of the snippet.
+// After the last argument.
+format!("{}({})${{}}", self.preferred_name, args.join(", "))
 }
 fn to_signature_help(&self) -> SignatureHelp {


@@ -498,17 +498,12 @@ pub trait StdLibFn: std::fmt::Debug + Send + Sync {
 })
 }
+#[allow(clippy::literal_string_with_formatting_args)]
 fn to_autocomplete_snippet(&self) -> Result<String> {
 if self.name() == "loft" {
-return Ok("loft([${0:sketch000}, ${1:sketch001}])".to_string());
-} else if self.name() == "union" {
-return Ok("union([${0:extrude001}, ${1:extrude002}])".to_string());
-} else if self.name() == "subtract" {
-return Ok("subtract([${0:extrude001}], tools = [${1:extrude002}])".to_string());
-} else if self.name() == "intersect" {
-return Ok("intersect([${0:extrude001}, ${1:extrude002}])".to_string());
+return Ok("loft([${0:sketch000}, ${1:sketch001}])${}".to_string());
 } else if self.name() == "hole" {
-return Ok("hole(${0:holeSketch}, ${1:%})".to_string());
+return Ok("hole(${0:holeSketch}, ${1:%})${}".to_string());
 }
 let in_keyword_fn = self.keyword_arguments();
 let mut args = Vec::new();
@@ -519,7 +514,9 @@ pub trait StdLibFn: std::fmt::Debug + Send + Sync {
 args.push(arg_str);
 }
 }
-Ok(format!("{}({})", self.name(), args.join(", ")))
+// We end with ${} so you can jump to the end of the snippet.
+// After the last argument.
+Ok(format!("{}({})${{}}", self.name(), args.join(", ")))
 }
 fn to_signature_help(&self) -> SignatureHelp {
@@ -893,26 +890,29 @@ mod tests {
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_line() {
 let line_fn: Box<dyn StdLibFn> = Box::new(crate::std::sketch::Line);
 let snippet = line_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"line(${0:%}, end = [${1:3.14}, ${2:3.14}])"#);
+assert_eq!(snippet, r#"line(${0:%}, end = [${1:3.14}, ${2:3.14}])${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_extrude() {
 let extrude_fn: Box<dyn StdLibFn> = Box::new(crate::std::extrude::Extrude);
 let snippet = extrude_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"extrude(${0:%}, length = ${1:3.14})"#);
+assert_eq!(snippet, r#"extrude(${0:%}, length = ${1:3.14})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_fillet() {
 let fillet_fn: Box<dyn StdLibFn> = Box::new(crate::std::fillet::Fillet);
 let snippet = fillet_fn.to_autocomplete_snippet().unwrap();
 assert_eq!(
 snippet,
-r#"fillet(${0:%}, radius = ${1:3.14}, tags = [${2:"tag_or_edge_fn"}])"#
+r#"fillet(${0:%}, radius = ${1:3.14}, tags = [${2:"tag_or_edge_fn"}])${}"#
 );
 }
@@ -920,17 +920,18 @@ mod tests {
 fn get_autocomplete_snippet_start_sketch_on() {
 let start_sketch_on_fn: Box<dyn StdLibFn> = Box::new(crate::std::sketch::StartSketchOn);
 let snippet = start_sketch_on_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"startSketchOn(${0:"XY"})"#);
+assert_eq!(snippet, r#"startSketchOn(${0:"XY"})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_pattern_circular_3d() {
 // We test this one specifically because it has ints and floats and strings.
 let pattern_fn: Box<dyn StdLibFn> = Box::new(crate::std::patterns::PatternCircular3D);
 let snippet = pattern_fn.to_autocomplete_snippet().unwrap();
 assert_eq!(
 snippet,
-r#"patternCircular3d(${0:%}, instances = ${1:10}, axis = [${2:3.14}, ${3:3.14}, ${4:3.14}], center = [${5:3.14}, ${6:3.14}, ${7:3.14}], arcDegrees = ${8:3.14}, rotateDuplicates = ${9:false})"#
+r#"patternCircular3d(${0:%}, instances = ${1:10}, axis = [${2:3.14}, ${3:3.14}, ${4:3.14}], center = [${5:3.14}, ${6:3.14}, ${7:3.14}], arcDegrees = ${8:3.14}, rotateDuplicates = ${9:false})${}"#
 );
 }
@@ -941,10 +942,11 @@ mod tests {
 panic!();
 };
 let snippet = revolve_fn.to_autocomplete_snippet();
-assert_eq!(snippet, r#"revolve(axis = ${0:X})"#);
+assert_eq!(snippet, r#"revolve(axis = ${0:X})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_circle() {
 let data = kcl_doc::walk_prelude();
 let DocData::Fn(circle_fn) = data.into_iter().find(|d| d.name() == "circle").unwrap() else {
@@ -953,11 +955,12 @@ mod tests {
 let snippet = circle_fn.to_autocomplete_snippet();
 assert_eq!(
 snippet,
-r#"circle(center = [${0:3.14}, ${1:3.14}], radius = ${2:3.14})"#
+r#"circle(center = [${0:3.14}, ${1:3.14}], radius = ${2:3.14})${}"#
 );
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_arc() {
 let arc_fn: Box<dyn StdLibFn> = Box::new(crate::std::sketch::Arc);
 let snippet = arc_fn.to_autocomplete_snippet().unwrap();
@@ -967,7 +970,7 @@ mod tests {
 angleStart = ${0:3.14},
 angleEnd = ${1:3.14},
 radius = ${2:3.14},
-}, ${3:%})"#
+}, ${3:%})${}"#
 );
 }
@@ -975,16 +978,17 @@ mod tests {
 fn get_autocomplete_snippet_map() {
 let map_fn: Box<dyn StdLibFn> = Box::new(crate::std::array::Map);
 let snippet = map_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"map(${0:[0..9]})"#);
+assert_eq!(snippet, r#"map(${0:[0..9]})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_pattern_linear_2d() {
 let pattern_fn: Box<dyn StdLibFn> = Box::new(crate::std::patterns::PatternLinear2D);
 let snippet = pattern_fn.to_autocomplete_snippet().unwrap();
 assert_eq!(
 snippet,
-r#"patternLinear2d(${0:%}, instances = ${1:10}, distance = ${2:3.14}, axis = [${3:3.14}, ${4:3.14}])"#
+r#"patternLinear2d(${0:%}, instances = ${1:10}, distance = ${2:3.14}, axis = [${3:3.14}, ${4:3.14}])${}"#
 );
 }
@@ -994,32 +998,36 @@ mod tests {
 let snippet = appearance_fn.to_autocomplete_snippet().unwrap();
 assert_eq!(
 snippet,
-r#"appearance(${0:%}, color = ${1:"#.to_owned() + "\"#" + r#"ff0000"})"#
+r#"appearance(${0:%}, color = ${1:"#.to_owned() + "\"#" + r#"ff0000"})${}"#
 );
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_loft() {
 let loft_fn: Box<dyn StdLibFn> = Box::new(crate::std::loft::Loft);
 let snippet = loft_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"loft([${0:sketch000}, ${1:sketch001}])"#);
+assert_eq!(snippet, r#"loft([${0:sketch000}, ${1:sketch001}])${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_sweep() {
 let sweep_fn: Box<dyn StdLibFn> = Box::new(crate::std::sweep::Sweep);
 let snippet = sweep_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"sweep(${0:%}, path = ${1:sketch000})"#);
+assert_eq!(snippet, r#"sweep(${0:%}, path = ${1:sketch000})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_hole() {
 let hole_fn: Box<dyn StdLibFn> = Box::new(crate::std::sketch::Hole);
 let snippet = hole_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"hole(${0:holeSketch}, ${1:%})"#);
+assert_eq!(snippet, r#"hole(${0:holeSketch}, ${1:%})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_helix() {
 let data = kcl_doc::walk_prelude();
 let DocData::Fn(helix_fn) = data.into_iter().find(|d| d.name() == "helix").unwrap() else {
@@ -1028,32 +1036,36 @@ mod tests {
 let snippet = helix_fn.to_autocomplete_snippet();
 assert_eq!(
 snippet,
-r#"helix(revolutions = ${0:3.14}, angleStart = ${1:3.14}, radius = ${2:3.14}, axis = ${3:X}, length = ${4:3.14})"#
+r#"helix(revolutions = ${0:3.14}, angleStart = ${1:3.14}, radius = ${2:3.14}, axis = ${3:X}, length = ${4:3.14})${}"#
 );
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_union() {
 let union_fn: Box<dyn StdLibFn> = Box::new(crate::std::csg::Union);
 let snippet = union_fn.to_autocomplete_snippet().unwrap();
-assert_eq!(snippet, r#"union([${0:extrude001}, ${1:extrude002}])"#);
+assert_eq!(snippet, r#"union(${0:%})${}"#);
 }
 #[test]
+#[allow(clippy::literal_string_with_formatting_args)]
 fn get_autocomplete_snippet_subtract() {
 let subtract_fn: Box<dyn StdLibFn> = Box::new(crate::std::csg::Subtract);
let snippet = subtract_fn.to_autocomplete_snippet().unwrap(); let snippet = subtract_fn.to_autocomplete_snippet().unwrap();
assert_eq!(snippet, r#"subtract([${0:extrude001}], tools = [${1:extrude002}])"#); assert_eq!(snippet, r#"subtract(${0:%}, tools = ${1:%})${}"#);
} }
#[test] #[test]
#[allow(clippy::literal_string_with_formatting_args)]
fn get_autocomplete_snippet_intersect() { fn get_autocomplete_snippet_intersect() {
let intersect_fn: Box<dyn StdLibFn> = Box::new(crate::std::csg::Intersect); let intersect_fn: Box<dyn StdLibFn> = Box::new(crate::std::csg::Intersect);
let snippet = intersect_fn.to_autocomplete_snippet().unwrap(); let snippet = intersect_fn.to_autocomplete_snippet().unwrap();
assert_eq!(snippet, r#"intersect([${0:extrude001}, ${1:extrude002}])"#); assert_eq!(snippet, r#"intersect(${0:%})${}"#);
} }
#[test] #[test]
#[allow(clippy::literal_string_with_formatting_args)]
fn get_autocomplete_snippet_get_common_edge() { fn get_autocomplete_snippet_get_common_edge() {
let get_common_edge_fn: Box<dyn StdLibFn> = Box::new(crate::std::edge::GetCommonEdge); let get_common_edge_fn: Box<dyn StdLibFn> = Box::new(crate::std::edge::GetCommonEdge);
let snippet = get_common_edge_fn.to_autocomplete_snippet().unwrap(); let snippet = get_common_edge_fn.to_autocomplete_snippet().unwrap();
@ -1061,34 +1073,40 @@ mod tests {
snippet, snippet,
r#"getCommonEdge(faces = [{ r#"getCommonEdge(faces = [{
value = ${0:"string"}, value = ${0:"string"},
}])"# }])${}"#
); );
} }
#[test] #[test]
#[allow(clippy::literal_string_with_formatting_args)]
fn get_autocomplete_snippet_scale() { fn get_autocomplete_snippet_scale() {
let scale_fn: Box<dyn StdLibFn> = Box::new(crate::std::transform::Scale); let scale_fn: Box<dyn StdLibFn> = Box::new(crate::std::transform::Scale);
let snippet = scale_fn.to_autocomplete_snippet().unwrap(); let snippet = scale_fn.to_autocomplete_snippet().unwrap();
assert_eq!(snippet, r#"scale(${0:%}, x = ${1:3.14}, y = ${2:3.14}, z = ${3:3.14})"#); assert_eq!(
snippet,
r#"scale(${0:%}, x = ${1:3.14}, y = ${2:3.14}, z = ${3:3.14})${}"#
);
} }
#[test] #[test]
#[allow(clippy::literal_string_with_formatting_args)]
fn get_autocomplete_snippet_translate() { fn get_autocomplete_snippet_translate() {
let translate_fn: Box<dyn StdLibFn> = Box::new(crate::std::transform::Translate); let translate_fn: Box<dyn StdLibFn> = Box::new(crate::std::transform::Translate);
let snippet = translate_fn.to_autocomplete_snippet().unwrap(); let snippet = translate_fn.to_autocomplete_snippet().unwrap();
assert_eq!( assert_eq!(
snippet, snippet,
r#"translate(${0:%}, x = ${1:3.14}, y = ${2:3.14}, z = ${3:3.14})"# r#"translate(${0:%}, x = ${1:3.14}, y = ${2:3.14}, z = ${3:3.14})${}"#
); );
} }
#[test] #[test]
#[allow(clippy::literal_string_with_formatting_args)]
fn get_autocomplete_snippet_rotate() { fn get_autocomplete_snippet_rotate() {
let rotate_fn: Box<dyn StdLibFn> = Box::new(crate::std::transform::Rotate); let rotate_fn: Box<dyn StdLibFn> = Box::new(crate::std::transform::Rotate);
let snippet = rotate_fn.to_autocomplete_snippet().unwrap(); let snippet = rotate_fn.to_autocomplete_snippet().unwrap();
assert_eq!( assert_eq!(
snippet, snippet,
r#"rotate(${0:%}, roll = ${1:3.14}, pitch = ${2:3.14}, yaw = ${3:3.14})"# r#"rotate(${0:%}, roll = ${1:3.14}, pitch = ${2:3.14}, yaw = ${3:3.14})${}"#
); );
} }
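The only functional change across these assertions is the trailing ${} appended to every expected snippet. A minimal sketch of how such a suffix could be appended in one place; the finish_snippet helper below is hypothetical and not part of this diff:

fn finish_snippet(body: &str) -> String {
    // Append the trailing placeholder that the assertions above now expect.
    format!("{body}${{}}")
}

fn main() {
    assert_eq!(finish_snippet("map(${0:[0..9]})"), "map(${0:[0..9]})${}");
}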

View File

@ -20,6 +20,7 @@ pub(crate) const SETTINGS_UNIT_ANGLE: &str = "defaultAngleUnit";
pub(super) const NO_PRELUDE: &str = "no_std"; pub(super) const NO_PRELUDE: &str = "no_std";
pub(super) const IMPORT_FORMAT: &str = "format"; pub(super) const IMPORT_FORMAT: &str = "format";
pub(super) const IMPORT_FORMAT_VALUES: [&str; 9] = ["fbx", "gltf", "glb", "obj", "ply", "sldprt", "stp", "step", "stl"];
pub(super) const IMPORT_COORDS: &str = "coords"; pub(super) const IMPORT_COORDS: &str = "coords";
pub(super) const IMPORT_COORDS_VALUES: [(&str, &System); 3] = pub(super) const IMPORT_COORDS_VALUES: [(&str, &System); 3] =
[("zoo", KITTYCAD), ("opengl", OPENGL), ("vulkan", VULKAN)]; [("zoo", KITTYCAD), ("opengl", OPENGL), ("vulkan", VULKAN)];

View File

@ -115,30 +115,6 @@ impl CodeRef {
} }
} }
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS)]
#[ts(export_to = "Artifact.ts")]
#[serde(rename_all = "camelCase")]
pub struct CompositeSolid {
pub id: ArtifactId,
pub sub_type: CompositeSolidSubType,
/// Constituent solids of the composite solid.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub solid_ids: Vec<ArtifactId>,
/// Tool solids used for asymmetric operations like subtract.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tool_ids: Vec<ArtifactId>,
pub code_ref: CodeRef,
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS)]
#[ts(export_to = "Artifact.ts")]
#[serde(rename_all = "camelCase")]
pub enum CompositeSolidSubType {
Intersect,
Subtract,
Union,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS)] #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS)]
#[ts(export_to = "Artifact.ts")] #[ts(export_to = "Artifact.ts")]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@ -342,7 +318,6 @@ pub struct Helix {
#[ts(export_to = "Artifact.ts")] #[ts(export_to = "Artifact.ts")]
#[serde(tag = "type", rename_all = "camelCase")] #[serde(tag = "type", rename_all = "camelCase")]
pub enum Artifact { pub enum Artifact {
CompositeSolid(CompositeSolid),
Plane(Plane), Plane(Plane),
Path(Path), Path(Path),
Segment(Segment), Segment(Segment),
@ -361,7 +336,6 @@ pub enum Artifact {
impl Artifact { impl Artifact {
pub(crate) fn id(&self) -> ArtifactId { pub(crate) fn id(&self) -> ArtifactId {
match self { match self {
Artifact::CompositeSolid(a) => a.id,
Artifact::Plane(a) => a.id, Artifact::Plane(a) => a.id,
Artifact::Path(a) => a.id, Artifact::Path(a) => a.id,
Artifact::Segment(a) => a.id, Artifact::Segment(a) => a.id,
@ -381,7 +355,6 @@ impl Artifact {
#[expect(dead_code)] #[expect(dead_code)]
pub(crate) fn code_ref(&self) -> Option<&CodeRef> { pub(crate) fn code_ref(&self) -> Option<&CodeRef> {
match self { match self {
Artifact::CompositeSolid(a) => Some(&a.code_ref),
Artifact::Plane(a) => Some(&a.code_ref), Artifact::Plane(a) => Some(&a.code_ref),
Artifact::Path(a) => Some(&a.code_ref), Artifact::Path(a) => Some(&a.code_ref),
Artifact::Segment(a) => Some(&a.code_ref), Artifact::Segment(a) => Some(&a.code_ref),
@ -402,7 +375,6 @@ impl Artifact {
/// type, return the new artifact which should be used as a replacement. /// type, return the new artifact which should be used as a replacement.
fn merge(&mut self, new: Artifact) -> Option<Artifact> { fn merge(&mut self, new: Artifact) -> Option<Artifact> {
match self { match self {
Artifact::CompositeSolid(a) => a.merge(new),
Artifact::Plane(a) => a.merge(new), Artifact::Plane(a) => a.merge(new),
Artifact::Path(a) => a.merge(new), Artifact::Path(a) => a.merge(new),
Artifact::Segment(a) => a.merge(new), Artifact::Segment(a) => a.merge(new),
@ -420,18 +392,6 @@ impl Artifact {
} }
} }
impl CompositeSolid {
fn merge(&mut self, new: Artifact) -> Option<Artifact> {
let Artifact::CompositeSolid(new) = new else {
return Some(new);
};
merge_ids(&mut self.solid_ids, new.solid_ids);
merge_ids(&mut self.tool_ids, new.tool_ids);
None
}
}
impl Plane { impl Plane {
fn merge(&mut self, new: Artifact) -> Option<Artifact> { fn merge(&mut self, new: Artifact) -> Option<Artifact> {
let Artifact::Plane(new) = new else { let Artifact::Plane(new) = new else {
@ -1087,85 +1047,6 @@ fn artifacts_to_update(
// the helix here, but it's not useful right now. // the helix here, but it's not useful right now.
return Ok(return_arr); return Ok(return_arr);
} }
ModelingCmd::BooleanIntersection(_) | ModelingCmd::BooleanSubtract(_) | ModelingCmd::BooleanUnion(_) => {
let (sub_type, solid_ids, tool_ids) = match cmd {
ModelingCmd::BooleanIntersection(intersection) => {
let solid_ids = intersection
.solid_ids
.iter()
.copied()
.map(ArtifactId::new)
.collect::<Vec<_>>();
(CompositeSolidSubType::Intersect, solid_ids, Vec::new())
}
ModelingCmd::BooleanSubtract(subtract) => {
let solid_ids = subtract
.target_ids
.iter()
.copied()
.map(ArtifactId::new)
.collect::<Vec<_>>();
let tool_ids = subtract
.tool_ids
.iter()
.copied()
.map(ArtifactId::new)
.collect::<Vec<_>>();
(CompositeSolidSubType::Subtract, solid_ids, tool_ids)
}
ModelingCmd::BooleanUnion(union) => {
let solid_ids = union.solid_ids.iter().copied().map(ArtifactId::new).collect::<Vec<_>>();
(CompositeSolidSubType::Union, solid_ids, Vec::new())
}
_ => unreachable!(),
};
let mut new_solid_ids = vec![id];
match response {
OkModelingCmdResponse::BooleanIntersection(intersection) => intersection
.extra_solid_ids
.iter()
.copied()
.map(ArtifactId::new)
.for_each(|id| new_solid_ids.push(id)),
OkModelingCmdResponse::BooleanSubtract(subtract) => subtract
.extra_solid_ids
.iter()
.copied()
.map(ArtifactId::new)
.for_each(|id| new_solid_ids.push(id)),
OkModelingCmdResponse::BooleanUnion(union) => union
.extra_solid_ids
.iter()
.copied()
.map(ArtifactId::new)
.for_each(|id| new_solid_ids.push(id)),
_ => {}
}
let return_arr = new_solid_ids
.into_iter()
// Extra solid IDs may include the command's ID. Make sure we
// don't create a duplicate.
.filter(|solid_id| *solid_id != id)
.map(|solid_id| {
Artifact::CompositeSolid(CompositeSolid {
id: solid_id,
sub_type,
solid_ids: solid_ids.clone(),
tool_ids: tool_ids.clone(),
code_ref: CodeRef {
range,
path_to_node: path_to_node.clone(),
},
})
})
.collect::<Vec<_>>();
// TODO: Should we add the reverse graph edges?
return Ok(return_arr);
}
_ => {} _ => {}
} }

View File

@ -67,11 +67,6 @@ impl Artifact {
/// the graph. This should be disjoint with `child_ids`. /// the graph. This should be disjoint with `child_ids`.
pub(crate) fn back_edges(&self) -> Vec<ArtifactId> { pub(crate) fn back_edges(&self) -> Vec<ArtifactId> {
match self { match self {
Artifact::CompositeSolid(a) => {
let mut ids = a.solid_ids.clone();
ids.extend(a.tool_ids.iter());
ids
}
Artifact::Plane(_) => Vec::new(), Artifact::Plane(_) => Vec::new(),
Artifact::Path(a) => vec![a.plane_id], Artifact::Path(a) => vec![a.plane_id],
Artifact::Segment(a) => vec![a.path_id], Artifact::Segment(a) => vec![a.path_id],
@ -92,11 +87,6 @@ impl Artifact {
/// the graph. /// the graph.
pub(crate) fn child_ids(&self) -> Vec<ArtifactId> { pub(crate) fn child_ids(&self) -> Vec<ArtifactId> {
match self { match self {
Artifact::CompositeSolid(_) => {
// Note: Don't include these since they're parents: solid_ids,
// tool_ids.
Vec::new()
}
Artifact::Plane(a) => a.path_ids.clone(), Artifact::Plane(a) => a.path_ids.clone(),
Artifact::Path(a) => { Artifact::Path(a) => {
// Note: Don't include these since they're parents: plane_id. // Note: Don't include these since they're parents: plane_id.
@ -223,7 +213,6 @@ impl ArtifactGraph {
let id = artifact.id(); let id = artifact.id();
let grouped = match artifact { let grouped = match artifact {
Artifact::CompositeSolid(_) => false,
Artifact::Plane(_) => false, Artifact::Plane(_) => false,
Artifact::Path(_) => { Artifact::Path(_) => {
groups.entry(id).or_insert_with(Vec::new).push(id); groups.entry(id).or_insert_with(Vec::new).push(id);
@ -289,15 +278,6 @@ impl ArtifactGraph {
} }
match artifact { match artifact {
Artifact::CompositeSolid(composite_solid) => {
writeln!(
output,
"{prefix}{}[\"CompositeSolid {:?}<br>{:?}\"]",
id,
composite_solid.sub_type,
code_ref_display(&composite_solid.code_ref)
)?;
}
Artifact::Plane(plane) => { Artifact::Plane(plane) => {
writeln!( writeln!(
output, output,

View File

@ -939,26 +939,16 @@ impl Node<BinaryExpression> {
if self.operator == BinaryOperator::Add || self.operator == BinaryOperator::Or { if self.operator == BinaryOperator::Add || self.operator == BinaryOperator::Or {
if let (KclValue::Solid { value: left }, KclValue::Solid { value: right }) = (&left_value, &right_value) { if let (KclValue::Solid { value: left }, KclValue::Solid { value: right }) = (&left_value, &right_value) {
let args = crate::std::Args::new(Default::default(), self.into(), ctx.clone(), None); let args = crate::std::Args::new(Default::default(), self.into(), ctx.clone(), None);
let result = crate::std::csg::inner_union( let result =
vec![*left.clone(), *right.clone()], crate::std::csg::inner_union(vec![*left.clone(), *right.clone()], exec_state, args).await?;
Default::default(),
exec_state,
args,
)
.await?;
return Ok(result.into()); return Ok(result.into());
} }
} else if self.operator == BinaryOperator::Sub { } else if self.operator == BinaryOperator::Sub {
// Check if we have solids. // Check if we have solids.
if let (KclValue::Solid { value: left }, KclValue::Solid { value: right }) = (&left_value, &right_value) { if let (KclValue::Solid { value: left }, KclValue::Solid { value: right }) = (&left_value, &right_value) {
let args = crate::std::Args::new(Default::default(), self.into(), ctx.clone(), None); let args = crate::std::Args::new(Default::default(), self.into(), ctx.clone(), None);
let result = crate::std::csg::inner_subtract( let result =
vec![*left.clone()], crate::std::csg::inner_subtract(vec![*left.clone()], vec![*right.clone()], exec_state, args)
vec![*right.clone()],
Default::default(),
exec_state,
args,
)
.await?; .await?;
return Ok(result.into()); return Ok(result.into());
} }
@ -966,13 +956,8 @@ impl Node<BinaryExpression> {
// Check if we have solids. // Check if we have solids.
if let (KclValue::Solid { value: left }, KclValue::Solid { value: right }) = (&left_value, &right_value) { if let (KclValue::Solid { value: left }, KclValue::Solid { value: right }) = (&left_value, &right_value) {
let args = crate::std::Args::new(Default::default(), self.into(), ctx.clone(), None); let args = crate::std::Args::new(Default::default(), self.into(), ctx.clone(), None);
let result = crate::std::csg::inner_intersect( let result =
vec![*left.clone(), *right.clone()], crate::std::csg::inner_intersect(vec![*left.clone(), *right.clone()], exec_state, args).await?;
Default::default(),
exec_state,
args,
)
.await?;
return Ok(result.into()); return Ok(result.into());
} }
} }
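Summarising the hunks above: when both operands are solids, the overloaded operators call the CSG helpers directly, and the tolerance argument is dropped from those calls. A hypothetical reduction of that mapping, for illustration only and not the crate's code:

fn csg_helper_for(op: char) -> &'static str {
    match op {
        // `+` and `|` both union the two solids.
        '+' | '|' => "inner_union",
        // `-` subtracts the right solid from the left.
        '-' => "inner_subtract",
        // `&` intersects the two solids.
        '&' => "inner_intersect",
        _ => "ordinary numeric/boolean evaluation",
    }
}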

View File

@ -173,7 +173,7 @@ pub(super) fn format_from_annotations(
KclError::Semantic(KclErrorDetails { KclError::Semantic(KclErrorDetails {
message: format!( message: format!(
"Unknown format for import, expected one of: {}", "Unknown format for import, expected one of: {}",
crate::IMPORT_FILE_EXTENSIONS.join(", ") annotations::IMPORT_FORMAT_VALUES.join(", ")
), ),
source_ranges: vec![p.as_source_range()], source_ranges: vec![p.as_source_range()],
}) })

View File

@ -11,7 +11,9 @@ pub use cache::{bust_cache, clear_mem_cache};
pub use cad_op::Operation; pub use cad_op::Operation;
pub use geometry::*; pub use geometry::*;
pub use id_generator::IdGenerator; pub use id_generator::IdGenerator;
pub(crate) use import::PreImportedGeometry; pub(crate) use import::{
import_foreign, send_to_engine as send_import_to_engine, PreImportedGeometry, ZOO_COORD_SYSTEM,
};
use indexmap::IndexMap; use indexmap::IndexMap;
pub use kcl_value::{KclObjectFields, KclValue}; pub use kcl_value::{KclObjectFields, KclValue};
use kcmc::{ use kcmc::{

View File

@ -131,36 +131,11 @@ pub mod pretty {
pub use crate::{parsing::token::NumericSuffix, unparser::format_number}; pub use crate::{parsing::token::NumericSuffix, unparser::format_number};
} }
#[cfg(feature = "cli")]
use clap::ValueEnum;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[allow(unused_imports)] #[allow(unused_imports)]
use crate::log::{log, logln}; use crate::log::{log, logln};
lazy_static::lazy_static! {
pub static ref IMPORT_FILE_EXTENSIONS: Vec<String> = {
let mut import_file_extensions = vec!["stp".to_string(), "glb".to_string(), "fbxb".to_string()];
#[cfg(feature = "cli")]
let named_extensions = kittycad::types::FileImportFormat::value_variants()
.iter()
.map(|x| format!("{}", x))
.collect::<Vec<String>>();
#[cfg(not(feature = "cli"))]
let named_extensions = vec![]; // We don't really need this outside of the CLI.
// Add all the default import formats.
import_file_extensions.extend_from_slice(&named_extensions);
import_file_extensions
};
pub static ref RELEVANT_FILE_EXTENSIONS: Vec<String> = {
let mut relevant_extensions = IMPORT_FILE_EXTENSIONS.clone();
relevant_extensions.push("kcl".to_string());
relevant_extensions
};
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Program { pub struct Program {
#[serde(flatten)] #[serde(flatten)]

View File

@ -3418,148 +3418,3 @@ async fn kcl_test_kcl_lsp_multi_file_error() {
server.executor_ctx().await.clone().unwrap().close().await; server.executor_ctx().await.clone().unwrap().close().await;
} }
#[tokio::test(flavor = "multi_thread")]
async fn test_kcl_lsp_on_hover_untitled_file_scheme() {
let server = kcl_lsp_server(true).await.unwrap();
// Send open file.
server
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
text_document: tower_lsp::lsp_types::TextDocumentItem {
uri: "untitled:Untitled-1".try_into().unwrap(),
language_id: "kcl".to_string(),
version: 1,
text: r#"startSketchOn(XY)
foo = 42
foo
fn bar(x: string): string {
return x
}
bar("an arg")
startSketchOn(XY)
|> startProfileAt([0, 0], %)
|> line(end = [10, 0])
|> line(end = [0, 10])
"#
.to_string(),
},
})
.await;
// Std lib call
let hover = server
.hover(tower_lsp::lsp_types::HoverParams {
text_document_position_params: tower_lsp::lsp_types::TextDocumentPositionParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "untitled:Untitled-1".try_into().unwrap(),
},
position: tower_lsp::lsp_types::Position { line: 0, character: 2 },
},
work_done_progress_params: Default::default(),
})
.await
.unwrap();
match hover.unwrap().contents {
tower_lsp::lsp_types::HoverContents::Markup(tower_lsp::lsp_types::MarkupContent { value, .. }) => {
assert!(value.contains("startSketchOn"));
assert!(value.contains(": SketchSurface"));
assert!(value.contains("Start a new 2-dimensional sketch on a specific"));
}
_ => unreachable!(),
}
// Variable use
let hover = server
.hover(tower_lsp::lsp_types::HoverParams {
text_document_position_params: tower_lsp::lsp_types::TextDocumentPositionParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "untitled:Untitled-1".try_into().unwrap(),
},
position: tower_lsp::lsp_types::Position { line: 2, character: 1 },
},
work_done_progress_params: Default::default(),
})
.await
.unwrap();
match hover.unwrap().contents {
tower_lsp::lsp_types::HoverContents::Markup(tower_lsp::lsp_types::MarkupContent { value, .. }) => {
assert!(value.contains("foo: number = 42"));
}
_ => unreachable!(),
}
// User-defined function call.
let hover = server
.hover(tower_lsp::lsp_types::HoverParams {
text_document_position_params: tower_lsp::lsp_types::TextDocumentPositionParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "untitled:Untitled-1".try_into().unwrap(),
},
position: tower_lsp::lsp_types::Position { line: 8, character: 1 },
},
work_done_progress_params: Default::default(),
})
.await
.unwrap();
match hover.unwrap().contents {
tower_lsp::lsp_types::HoverContents::Markup(tower_lsp::lsp_types::MarkupContent { value, .. }) => {
assert!(value.contains("bar(x: string): string"));
}
_ => unreachable!(),
}
// Variable inside a function
let hover = server
.hover(tower_lsp::lsp_types::HoverParams {
text_document_position_params: tower_lsp::lsp_types::TextDocumentPositionParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "untitled:Untitled-1".try_into().unwrap(),
},
position: tower_lsp::lsp_types::Position { line: 5, character: 9 },
},
work_done_progress_params: Default::default(),
})
.await
.unwrap();
match hover.unwrap().contents {
tower_lsp::lsp_types::HoverContents::Markup(tower_lsp::lsp_types::MarkupContent { value, .. }) => {
assert!(value.contains("x: string"));
}
_ => unreachable!(),
}
// std function KwArg
let hover = server
.hover(tower_lsp::lsp_types::HoverParams {
text_document_position_params: tower_lsp::lsp_types::TextDocumentPositionParams {
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
uri: "untitled:Untitled-1".try_into().unwrap(),
},
position: tower_lsp::lsp_types::Position {
line: 12,
character: 11,
},
},
work_done_progress_params: Default::default(),
})
.await
.unwrap();
match hover.unwrap().contents {
tower_lsp::lsp_types::HoverContents::Markup(tower_lsp::lsp_types::MarkupContent { value, .. }) => {
assert!(value.contains("end?: [number]"));
assert!(value.contains("How far away (along the X and Y axes) should this line go?"));
}
_ => unreachable!(),
}
server.executor_ctx().await.clone().unwrap().close().await;
}

View File

@ -35,7 +35,7 @@ use crate::{
token::{Token, TokenSlice, TokenType}, token::{Token, TokenSlice, TokenType},
PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR, PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR,
}, },
SourceRange, IMPORT_FILE_EXTENSIONS, SourceRange,
}; };
thread_local! { thread_local! {
@ -1803,6 +1803,11 @@ fn import_stmt(i: &mut TokenSlice) -> PResult<BoxNode<ImportStatement>> {
end = alias.end; end = alias.end;
*selector_alias = Some(alias); *selector_alias = Some(alias);
} }
ParseContext::warn(CompilationError::err(
SourceRange::new(start, path.end, path.module_id),
"Importing a whole module is experimental, likely to be buggy, and likely to change",
));
} }
let path_string = match path.inner.value { let path_string = match path.inner.value {
@ -1838,6 +1843,8 @@ fn import_stmt(i: &mut TokenSlice) -> PResult<BoxNode<ImportStatement>> {
)) ))
} }
const FOREIGN_IMPORT_EXTENSIONS: [&str; 8] = ["fbx", "gltf", "glb", "obj", "ply", "sldprt", "step", "stl"];
/// Validates the path string in an `import` statement. /// Validates the path string in an `import` statement.
/// ///
/// `var_name` is `true` if the path will be used as a variable name. /// `var_name` is `true` if the path will be used as a variable name.
@ -1902,11 +1909,12 @@ fn validate_path_string(path_string: String, var_name: bool, path_range: SourceR
ImportPath::Std { path: segments } ImportPath::Std { path: segments }
} else if path_string.contains('.') { } else if path_string.contains('.') {
let extn = std::path::Path::new(&path_string).extension().unwrap_or_default(); // TODO should allow other extensions if there is a format attribute.
if !IMPORT_FILE_EXTENSIONS.contains(&extn.to_string_lossy().to_string()) { let extn = &path_string[path_string.rfind('.').unwrap() + 1..];
if !FOREIGN_IMPORT_EXTENSIONS.contains(&extn) {
ParseContext::warn(CompilationError::err( ParseContext::warn(CompilationError::err(
path_range, path_range,
format!("unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}", IMPORT_FILE_EXTENSIONS.join(", ")), format!("unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}", FOREIGN_IMPORT_EXTENSIONS.join(", ")),
)) ))
} }
ImportPath::Foreign { path: path_string } ImportPath::Foreign { path: path_string }
@ -1914,7 +1922,7 @@ fn validate_path_string(path_string: String, var_name: bool, path_range: SourceR
return Err(ErrMode::Cut( return Err(ErrMode::Cut(
CompilationError::fatal( CompilationError::fatal(
path_range, path_range,
format!("unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}", IMPORT_FILE_EXTENSIONS.join(", ")), format!("unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}", FOREIGN_IMPORT_EXTENSIONS.join(", ")),
) )
.into(), .into(),
)); ));
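The replacement extension check keeps everything after the last '.' and tests it against FOREIGN_IMPORT_EXTENSIONS. A standalone sketch of that behaviour; the constant is copied from above, the sample path is illustrative:

fn main() {
    const FOREIGN_IMPORT_EXTENSIONS: [&str; 8] =
        ["fbx", "gltf", "glb", "obj", "ply", "sldprt", "step", "stl"];
    let path_string = "parts/bracket.step";
    // Everything after the last '.' is treated as the extension.
    let extn = &path_string[path_string.rfind('.').unwrap() + 1..];
    assert_eq!(extn, "step");
    assert!(FOREIGN_IMPORT_EXTENSIONS.contains(&extn));
}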
@ -4490,9 +4498,21 @@ export fn cos(num: number(rad)): number(_) {}"#;
#[test] #[test]
fn warn_import() { fn warn_import() {
let some_program_string = r#"import "foo.bad""#; let some_program_string = r#"import "foo.kcl""#;
let (_, errs) = assert_no_err(some_program_string); let (_, errs) = assert_no_err(some_program_string);
assert_eq!(errs.len(), 1, "{errs:#?}"); assert_eq!(errs.len(), 1, "{errs:#?}");
let some_program_string = r#"import "foo.obj""#;
let (_, errs) = assert_no_err(some_program_string);
assert_eq!(errs.len(), 1, "{errs:#?}");
let some_program_string = r#"import "foo.sldprt""#;
let (_, errs) = assert_no_err(some_program_string);
assert_eq!(errs.len(), 1, "{errs:#?}");
let some_program_string = r#"import "foo.bad""#;
let (_, errs) = assert_no_err(some_program_string);
assert_eq!(errs.len(), 2, "{errs:#?}");
} }
#[test] #[test]

View File

@ -664,6 +664,10 @@ impl Args {
FromArgs::from_args(self, 0) FromArgs::from_args(self, 0)
} }
pub(crate) fn get_import_data(&self) -> Result<(String, Option<crate::std::import::ImportFormat>), KclError> {
FromArgs::from_args(self, 0)
}
pub(crate) fn get_sketch_data_and_optional_tag( pub(crate) fn get_sketch_data_and_optional_tag(
&self, &self,
) -> Result<(super::sketch::SketchData, Option<FaceTag>), KclError> { ) -> Result<(super::sketch::SketchData, Option<FaceTag>), KclError> {
@ -1073,6 +1077,35 @@ macro_rules! let_field_of {
}; };
} }
impl<'a> FromKclValue<'a> for crate::std::import::ImportFormat {
fn from_kcl_val(arg: &'a KclValue) -> Option<Self> {
let obj = arg.as_object()?;
let_field_of!(obj, typ "format");
match typ {
"fbx" => Some(Self::Fbx {}),
"gltf" => Some(Self::Gltf {}),
"sldprt" => Some(Self::Sldprt {}),
"step" => Some(Self::Step {}),
"stl" => {
let_field_of!(obj, coords?);
let_field_of!(obj, units);
Some(Self::Stl { coords, units })
}
"obj" => {
let_field_of!(obj, coords?);
let_field_of!(obj, units);
Some(Self::Obj { coords, units })
}
"ply" => {
let_field_of!(obj, coords?);
let_field_of!(obj, units);
Some(Self::Ply { coords, units })
}
_ => None,
}
}
}
impl<'a> FromKclValue<'a> for super::sketch::AngledLineThatIntersectsData { impl<'a> FromKclValue<'a> for super::sketch::AngledLineThatIntersectsData {
fn from_kcl_val(arg: &'a KclValue) -> Option<Self> { fn from_kcl_val(arg: &'a KclValue) -> Option<Self> {
let obj = arg.as_object()?; let obj = arg.as_object()?;

View File

@ -2,13 +2,6 @@
use anyhow::Result; use anyhow::Result;
use kcl_derive_docs::stdlib; use kcl_derive_docs::stdlib;
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, ModelingCmd};
use kittycad_modeling_cmds::{
self as kcmc,
ok_response::OkModelingCmdResponse,
output::{BooleanIntersection, BooleanSubtract, BooleanUnion},
websocket::OkWebSocketResponseData,
};
use crate::{ use crate::{
errors::{KclError, KclErrorDetails}, errors::{KclError, KclErrorDetails},
@ -16,13 +9,10 @@ use crate::{
std::Args, std::Args,
}; };
use super::DEFAULT_TOLERANCE;
/// Union two or more solids into a single solid. /// Union two or more solids into a single solid.
pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> { pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> {
let solids: Vec<Solid> = let solids: Vec<Solid> =
args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::Union(vec![RuntimeType::solids()]), exec_state)?; args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::Union(vec![RuntimeType::solids()]), exec_state)?;
let tolerance = args.get_kw_arg_opt("tolerance")?;
if solids.len() < 2 { if solids.len() < 2 {
return Err(KclError::UndefinedValue(KclErrorDetails { return Err(KclError::UndefinedValue(KclErrorDetails {
@ -31,7 +21,7 @@ pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
})); }));
} }
let solids = inner_union(solids, tolerance, exec_state, args).await?; let solids = inner_union(solids, exec_state, args).await?;
Ok(solids.into()) Ok(solids.into())
} }
@ -40,19 +30,18 @@ pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
/// ```no_run /// ```no_run
/// // Union two cubes using the stdlib functions. /// // Union two cubes using the stdlib functions.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = cube([20, 10])
/// |> translate(z = 1)
/// ///
/// unionedPart = union([part001, part002]) /// unionedPart = union([part001, part002])
/// ``` /// ```
@ -62,19 +51,18 @@ pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
/// // NOTE: This will not work when using codemods through the UI. /// // NOTE: This will not work when using codemods through the UI.
/// // Codemods will generate the stdlib function call instead. /// // Codemods will generate the stdlib function call instead.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = cube([20, 10])
/// |> translate(z = 1)
/// ///
/// // This is the equivalent of: union([part001, part002]) /// // This is the equivalent of: union([part001, part002])
/// unionedPart = part001 + part002 /// unionedPart = part001 + part002
@ -85,19 +73,18 @@ pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
/// // NOTE: This will not work when using codemods through the UI. /// // NOTE: This will not work when using codemods through the UI.
/// // Codemods will generate the stdlib function call instead. /// // Codemods will generate the stdlib function call instead.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = cube([20, 10])
/// |> translate(z = 1)
/// ///
/// // This is the equivalent of: union([part001, part002]) /// // This is the equivalent of: union([part001, part002])
/// // Programmers will understand `|` as a union operation, but mechanical engineers /// // Programmers will understand `|` as a union operation, but mechanical engineers
@ -109,64 +96,31 @@ pub async fn union(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
feature_tree_operation = true, feature_tree_operation = true,
keywords = true, keywords = true,
unlabeled_first = true, unlabeled_first = true,
deprecated = true,
args = { args = {
solids = {docs = "The solids to union."}, solids = {docs = "The solids to union."},
tolerance = {docs = "The tolerance to use for the union operation."},
} }
}] }]
pub(crate) async fn inner_union( pub(crate) async fn inner_union(
solids: Vec<Solid>, solids: Vec<Solid>,
tolerance: Option<f64>,
exec_state: &mut ExecState, exec_state: &mut ExecState,
args: Args, args: Args,
) -> Result<Vec<Solid>, KclError> { ) -> Result<Vec<Solid>, KclError> {
let solid_out_id = exec_state.next_uuid();
let mut solid = solids[0].clone();
solid.id = solid_out_id;
let mut new_solids = vec![solid.clone()];
if args.ctx.no_engine_commands().await {
return Ok(new_solids);
}
// Flush the fillets for the solids. // Flush the fillets for the solids.
args.flush_batch_for_solids(exec_state, &solids).await?; args.flush_batch_for_solids(exec_state, &solids).await?;
let result = args // TODO: call the engine union operation.
.send_modeling_cmd( // TODO: figure out all the shit after for the faces etc.
solid_out_id,
ModelingCmd::from(mcmd::BooleanUnion {
solid_ids: solids.iter().map(|s| s.id).collect(),
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
}),
)
.await?;
let OkWebSocketResponseData::Modeling { // For now just return the first solid.
modeling_response: OkModelingCmdResponse::BooleanUnion(BooleanUnion { extra_solid_ids }), // Til we have a proper implementation.
} = result Ok(vec![solids[0].clone()])
else {
return Err(KclError::Internal(KclErrorDetails {
message: "Failed to get the result of the union operation.".to_string(),
source_ranges: vec![args.source_range],
}));
};
// If we have more solids, set those as well.
if !extra_solid_ids.is_empty() {
solid.id = extra_solid_ids[0];
new_solids.push(solid.clone());
}
Ok(new_solids)
} }
/// Intersect returns the shared volume between multiple solids, preserving only /// Intersect returns the shared volume between multiple solids, preserving only
/// overlapping regions. /// overlapping regions.
pub async fn intersect(exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> { pub async fn intersect(exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> {
let solids: Vec<Solid> = args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::solids(), exec_state)?; let solids: Vec<Solid> = args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::solids(), exec_state)?;
let tolerance = args.get_kw_arg_opt("tolerance")?;
if solids.len() < 2 { if solids.len() < 2 {
return Err(KclError::UndefinedValue(KclErrorDetails { return Err(KclError::UndefinedValue(KclErrorDetails {
@ -175,7 +129,7 @@ pub async fn intersect(exec_state: &mut ExecState, args: Args) -> Result<KclValu
})); }));
} }
let solids = inner_intersect(solids, tolerance, exec_state, args).await?; let solids = inner_intersect(solids, exec_state, args).await?;
Ok(solids.into()) Ok(solids.into())
} }
@ -190,19 +144,18 @@ pub async fn intersect(exec_state: &mut ExecState, args: Args) -> Result<KclValu
/// ```no_run /// ```no_run
/// // Intersect two cubes using the stdlib functions. /// // Intersect two cubes using the stdlib functions.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = cube([8, 8])
/// |> translate(z = 1)
/// ///
/// intersectedPart = intersect([part001, part002]) /// intersectedPart = intersect([part001, part002])
/// ``` /// ```
@ -212,19 +165,18 @@ pub async fn intersect(exec_state: &mut ExecState, args: Args) -> Result<KclValu
/// // NOTE: This will not work when using codemods through the UI. /// // NOTE: This will not work when using codemods through the UI.
/// // Codemods will generate the stdlib function call instead. /// // Codemods will generate the stdlib function call instead.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = cube([8, 8])
/// |> translate(z = 1)
/// ///
/// // This is the equivalent of: intersect([part001, part002]) /// // This is the equivalent of: intersect([part001, part002])
/// intersectedPart = part001 & part002 /// intersectedPart = part001 & part002
@ -234,57 +186,25 @@ pub async fn intersect(exec_state: &mut ExecState, args: Args) -> Result<KclValu
feature_tree_operation = true, feature_tree_operation = true,
keywords = true, keywords = true,
unlabeled_first = true, unlabeled_first = true,
deprecated = true,
args = { args = {
solids = {docs = "The solids to intersect."}, solids = {docs = "The solids to intersect."},
tolerance = {docs = "The tolerance to use for the intersection operation."},
} }
}] }]
pub(crate) async fn inner_intersect( pub(crate) async fn inner_intersect(
solids: Vec<Solid>, solids: Vec<Solid>,
tolerance: Option<f64>,
exec_state: &mut ExecState, exec_state: &mut ExecState,
args: Args, args: Args,
) -> Result<Vec<Solid>, KclError> { ) -> Result<Vec<Solid>, KclError> {
let solid_out_id = exec_state.next_uuid();
let mut solid = solids[0].clone();
solid.id = solid_out_id;
let mut new_solids = vec![solid.clone()];
if args.ctx.no_engine_commands().await {
return Ok(new_solids);
}
// Flush the fillets for the solids. // Flush the fillets for the solids.
args.flush_batch_for_solids(exec_state, &solids).await?; args.flush_batch_for_solids(exec_state, &solids).await?;
let result = args // TODO: call the engine union operation.
.send_modeling_cmd( // TODO: figure out all the shit after for the faces etc.
solid_out_id,
ModelingCmd::from(mcmd::BooleanIntersection {
solid_ids: solids.iter().map(|s| s.id).collect(),
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
}),
)
.await?;
let OkWebSocketResponseData::Modeling { // For now just return the first solid.
modeling_response: OkModelingCmdResponse::BooleanIntersection(BooleanIntersection { extra_solid_ids }), // Til we have a proper implementation.
} = result Ok(vec![solids[0].clone()])
else {
return Err(KclError::Internal(KclErrorDetails {
message: "Failed to get the result of the intersection operation.".to_string(),
source_ranges: vec![args.source_range],
}));
};
// If we have more solids, set those as well.
if !extra_solid_ids.is_empty() {
solid.id = extra_solid_ids[0];
new_solids.push(solid.clone());
}
Ok(new_solids)
} }
/// Subtract removes tool solids from base solids, leaving the remaining material. /// Subtract removes tool solids from base solids, leaving the remaining material.
@ -292,23 +212,7 @@ pub async fn subtract(exec_state: &mut ExecState, args: Args) -> Result<KclValue
let solids: Vec<Solid> = args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::solids(), exec_state)?; let solids: Vec<Solid> = args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::solids(), exec_state)?;
let tools: Vec<Solid> = args.get_kw_arg_typed("tools", &RuntimeType::solids(), exec_state)?; let tools: Vec<Solid> = args.get_kw_arg_typed("tools", &RuntimeType::solids(), exec_state)?;
if solids.len() > 1 { let solids = inner_subtract(solids, tools, exec_state, args).await?;
return Err(KclError::UndefinedValue(KclErrorDetails {
message: "Only one solid is allowed for a subtract operation, currently.".to_string(),
source_ranges: vec![args.source_range],
}));
}
if tools.len() > 1 {
return Err(KclError::UndefinedValue(KclErrorDetails {
message: "Only one tool is allowed for a subtract operation, currently.".to_string(),
source_ranges: vec![args.source_range],
}));
}
let tolerance = args.get_kw_arg_opt("tolerance")?;
let solids = inner_subtract(solids, tools, tolerance, exec_state, args).await?;
Ok(solids.into()) Ok(solids.into())
} }
@ -323,19 +227,20 @@ pub async fn subtract(exec_state: &mut ExecState, args: Args) -> Result<KclValue
/// ```no_run /// ```no_run
/// // Subtract a cylinder from a cube using the stdlib functions. /// // Subtract a cylinder from a cube using the stdlib functions.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = startSketchOn('XY')
/// |> translate(z = 1) /// |> circle(center = [0, 0], radius = 2)
/// |> extrude(length = 10)
/// ///
/// subtractedPart = subtract([part001], tools=[part002]) /// subtractedPart = subtract([part001], tools=[part002])
/// ``` /// ```
@ -345,19 +250,20 @@ pub async fn subtract(exec_state: &mut ExecState, args: Args) -> Result<KclValue
/// // NOTE: This will not work when using codemods through the UI. /// // NOTE: This will not work when using codemods through the UI.
/// // Codemods will generate the stdlib function call instead. /// // Codemods will generate the stdlib function call instead.
/// ///
/// fn cube(center, size) { /// fn cube(center) {
/// return startSketchOn('XY') /// return startSketchOn('XY')
/// |> startProfileAt([center[0] - size, center[1] - size], %) /// |> startProfileAt([center[0] - 10, center[1] - 10], %)
/// |> line(endAbsolute = [center[0] + size, center[1] - size]) /// |> line(endAbsolute = [center[0] + 10, center[1] - 10])
/// |> line(endAbsolute = [center[0] + size, center[1] + size]) /// |> line(endAbsolute = [center[0] + 10, center[1] + 10])
/// |> line(endAbsolute = [center[0] - size, center[1] + size]) /// |> line(endAbsolute = [center[0] - 10, center[1] + 10])
/// |> close() /// |> close()
/// |> extrude(length = 10) /// |> extrude(length = 10)
/// } /// }
/// ///
/// part001 = cube([0, 0], 10) /// part001 = cube([0, 0])
/// part002 = cube([7, 3], 5) /// part002 = startSketchOn('XY')
/// |> translate(z = 1) /// |> circle(center = [0, 0], radius = 2)
/// |> extrude(length = 10)
/// ///
/// // This is the equivalent of: subtract([part001], tools=[part002]) /// // This is the equivalent of: subtract([part001], tools=[part002])
/// subtractedPart = part001 - part002 /// subtractedPart = part001 - part002
@ -367,59 +273,26 @@ pub async fn subtract(exec_state: &mut ExecState, args: Args) -> Result<KclValue
feature_tree_operation = true, feature_tree_operation = true,
keywords = true, keywords = true,
unlabeled_first = true, unlabeled_first = true,
deprecated = true,
args = { args = {
solids = {docs = "The solids to use as the base to subtract from."}, solids = {docs = "The solids to use as the base to subtract from."},
tools = {docs = "The solids to subtract."}, tools = {docs = "The solids to subtract."},
tolerance = {docs = "The tolerance to use for the subtraction operation."},
} }
}] }]
pub(crate) async fn inner_subtract( pub(crate) async fn inner_subtract(
solids: Vec<Solid>, solids: Vec<Solid>,
tools: Vec<Solid>, tools: Vec<Solid>,
tolerance: Option<f64>,
exec_state: &mut ExecState, exec_state: &mut ExecState,
args: Args, args: Args,
) -> Result<Vec<Solid>, KclError> { ) -> Result<Vec<Solid>, KclError> {
let solid_out_id = exec_state.next_uuid();
let mut solid = solids[0].clone();
solid.id = solid_out_id;
let mut new_solids = vec![solid.clone()];
if args.ctx.no_engine_commands().await {
return Ok(new_solids);
}
// Flush the fillets for the solids and the tools. // Flush the fillets for the solids and the tools.
let combined_solids = solids.iter().chain(tools.iter()).cloned().collect::<Vec<Solid>>(); let combined_solids = solids.iter().chain(tools.iter()).cloned().collect::<Vec<Solid>>();
args.flush_batch_for_solids(exec_state, &combined_solids).await?; args.flush_batch_for_solids(exec_state, &combined_solids).await?;
let result = args // TODO: call the engine union operation.
.send_modeling_cmd( // TODO: figure out all the shit after for the faces etc.
solid_out_id,
ModelingCmd::from(mcmd::BooleanSubtract {
target_ids: solids.iter().map(|s| s.id).collect(),
tool_ids: tools.iter().map(|s| s.id).collect(),
tolerance: LengthUnit(tolerance.unwrap_or(DEFAULT_TOLERANCE)),
}),
)
.await?;
let OkWebSocketResponseData::Modeling { // For now just return the first solid.
modeling_response: OkModelingCmdResponse::BooleanSubtract(BooleanSubtract { extra_solid_ids }), // Til we have a proper implementation.
} = result Ok(vec![solids[0].clone()])
else {
return Err(KclError::Internal(KclErrorDetails {
message: "Failed to get the result of the subtract operation.".to_string(),
source_ranges: vec![args.source_range],
}));
};
// If we have more solids, set those as well.
if !extra_solid_ids.is_empty() {
solid.id = extra_solid_ids[0];
new_solids.push(solid.clone());
}
Ok(new_solids)
} }

View File

@ -0,0 +1,181 @@
//! Standard library functions involved in importing files.
use anyhow::Result;
use kcl_derive_docs::stdlib;
use kcmc::{coord::System, format::InputFormat3d, units::UnitLength};
use kittycad_modeling_cmds as kcmc;
use crate::{
errors::{KclError, KclErrorDetails},
execution::{import_foreign, send_import_to_engine, ExecState, ImportedGeometry, KclValue, ZOO_COORD_SYSTEM},
std::Args,
};
/// Import format specifier
#[derive(serde :: Serialize, serde :: Deserialize, PartialEq, Debug, Clone, schemars :: JsonSchema)]
#[cfg_attr(feature = "tabled", derive(tabled::Tabled))]
#[serde(tag = "format")]
pub enum ImportFormat {
/// Autodesk Filmbox (FBX) format
#[serde(rename = "fbx")]
Fbx {},
/// Binary glTF 2.0. We refer to this as glTF since that is how our customers refer to
/// it, but this can also import binary glTF (glb).
#[serde(rename = "gltf")]
Gltf {},
/// Wavefront OBJ format.
#[serde(rename = "obj")]
Obj {
/// Co-ordinate system of input data.
/// Defaults to the KittyCAD co-ordinate system.
coords: Option<System>,
/// The units of the input data. This is very important for correct scaling and when
/// calculating physics properties like mass, etc.
/// Defaults to millimeters.
units: UnitLength,
},
/// The PLY Polygon File Format.
#[serde(rename = "ply")]
Ply {
/// Co-ordinate system of input data.
/// Defaults to the KittyCAD co-ordinate system.
coords: Option<System>,
/// The units of the input data. This is very important for correct scaling and when
/// calculating physics properties like mass, etc.
/// Defaults to millimeters.
units: UnitLength,
},
/// SolidWorks part (SLDPRT) format.
#[serde(rename = "sldprt")]
Sldprt {},
/// ISO 10303-21 (STEP) format.
#[serde(rename = "step")]
Step {},
/// **ST**ereo**L**ithography format.
#[serde(rename = "stl")]
Stl {
/// Co-ordinate system of input data.
/// Defaults to the KittyCAD co-ordinate system.
coords: Option<System>,
/// The units of the input data. This is very important for correct scaling and when
/// calculating physics properties like mass, etc.
/// Defaults to millimeters.
units: UnitLength,
},
}
impl From<ImportFormat> for InputFormat3d {
fn from(format: ImportFormat) -> Self {
match format {
ImportFormat::Fbx {} => InputFormat3d::Fbx(Default::default()),
ImportFormat::Gltf {} => InputFormat3d::Gltf(Default::default()),
ImportFormat::Obj { coords, units } => InputFormat3d::Obj(kcmc::format::obj::import::Options {
coords: coords.unwrap_or(ZOO_COORD_SYSTEM),
units,
}),
ImportFormat::Ply { coords, units } => InputFormat3d::Ply(kcmc::format::ply::import::Options {
coords: coords.unwrap_or(ZOO_COORD_SYSTEM),
units,
}),
ImportFormat::Sldprt {} => InputFormat3d::Sldprt(kcmc::format::sldprt::import::Options {
split_closed_faces: false,
}),
ImportFormat::Step {} => InputFormat3d::Step(kcmc::format::step::import::Options {
split_closed_faces: false,
}),
ImportFormat::Stl { coords, units } => InputFormat3d::Stl(kcmc::format::stl::import::Options {
coords: coords.unwrap_or(ZOO_COORD_SYSTEM),
units,
}),
}
}
}
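A minimal usage sketch of the conversion above, assuming this module's imports and that UnitLength::Millimeters is a valid variant (this diff does not assert that anywhere):

#[cfg(test)]
mod import_format_conversion_tests {
    use super::*;

    #[test]
    fn obj_with_no_coords_maps_to_obj_options() {
        let fmt: InputFormat3d = ImportFormat::Obj {
            coords: None, // the From impl above falls back to ZOO_COORD_SYSTEM
            units: UnitLength::Millimeters,
        }
        .into();
        assert!(matches!(fmt, InputFormat3d::Obj(_)));
    }
}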
/// Import a CAD file.
/// For formats lacking unit data (STL, OBJ, PLY), the default import unit is millimeters.
/// Otherwise you can specify the unit by passing in the options parameter.
/// If you import a gltf file, we will try to find the bin file and import it as well.
///
/// Import paths are relative to the current project directory. This only works in the
/// desktop app, not in the browser.
pub async fn import(exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> {
let (file_path, options): (String, Option<ImportFormat>) = args.get_import_data()?;
let imported_geometry = inner_import(file_path, options, exec_state, args).await?;
Ok(KclValue::ImportedGeometry(imported_geometry))
}
/// Import a CAD file.
///
/// **DEPRECATED** Prefer to use import statements.
///
/// For formats lacking unit data (such as STL, OBJ, or PLY files), the default
/// unit of measurement is millimeters. Alternatively you may specify the unit
/// by passing your desired measurement unit in the options parameter. When
/// importing a GLTF file, the bin file will be imported as well. Import paths
/// are relative to the current project directory.
///
/// Note: The import command currently only works when using the native
/// Design Studio.
///
/// ```no_run
/// model = import("tests/inputs/cube.obj")
/// ```
///
/// ```no_run
/// model = import("tests/inputs/cube.obj", {format: "obj", units: "m"})
/// ```
///
/// ```no_run
/// model = import("tests/inputs/cube.gltf")
/// ```
///
/// ```no_run
/// model = import("tests/inputs/cube.sldprt")
/// ```
///
/// ```no_run
/// model = import("tests/inputs/cube.step")
/// ```
///
/// ```no_run
/// import height, buildSketch from 'common.kcl'
///
/// plane = 'XZ'
/// margin = 2
/// s1 = buildSketch(plane, [0, 0])
/// s2 = buildSketch(plane, [0, height() + margin])
/// ```
#[stdlib {
name = "import",
feature_tree_operation = true,
deprecated = true,
tags = [],
}]
async fn inner_import(
file_path: String,
options: Option<ImportFormat>,
exec_state: &mut ExecState,
args: Args,
) -> Result<ImportedGeometry, KclError> {
if file_path.is_empty() {
return Err(KclError::Semantic(KclErrorDetails {
message: "No file path was provided.".to_string(),
source_ranges: vec![args.source_range],
}));
}
let format = options.map(InputFormat3d::from);
send_import_to_engine(
import_foreign(
std::path::Path::new(&file_path),
format,
exec_state,
&args.ctx,
args.source_range,
)
.await?,
&args.ctx,
)
.await
}

View File

@ -12,6 +12,7 @@ pub mod edge;
pub mod extrude; pub mod extrude;
pub mod fillet; pub mod fillet;
pub mod helix; pub mod helix;
pub mod import;
pub mod loft; pub mod loft;
pub mod math; pub mod math;
pub mod mirror; pub mod mirror;
@ -110,6 +111,7 @@ lazy_static! {
Box::new(crate::std::sweep::Sweep), Box::new(crate::std::sweep::Sweep),
Box::new(crate::std::loft::Loft), Box::new(crate::std::loft::Loft),
Box::new(crate::std::planes::OffsetPlane), Box::new(crate::std::planes::OffsetPlane),
Box::new(crate::std::import::Import),
Box::new(crate::std::math::Acos), Box::new(crate::std::math::Acos),
Box::new(crate::std::math::Asin), Box::new(crate::std::math::Asin),
Box::new(crate::std::math::Atan), Box::new(crate::std::math::Atan),

View File

@ -234,37 +234,40 @@ pub fn is_on_circumference(center: Point2d, point: Point2d, radius: f64) -> bool
(distance_squared - radius.powi(2)).abs() < 1e-9 (distance_squared - radius.powi(2)).abs() < 1e-9
} }
// Calculate the center of 3 points using an algebraic method // Calculate the center of 3 points
// Handles if 3 points lie on the same line (collinear) by returning the average of the points (could return None instead..) // To calculate the center of the 3 point circle 2 perpendicular lines are created
// These perpendicular lines will intersect at the center of the circle.
pub fn calculate_circle_center(p1: [f64; 2], p2: [f64; 2], p3: [f64; 2]) -> [f64; 2] { pub fn calculate_circle_center(p1: [f64; 2], p2: [f64; 2], p3: [f64; 2]) -> [f64; 2] {
let (x1, y1) = (p1[0], p1[1]); // y2 - y1
let (x2, y2) = (p2[0], p2[1]); let y_2_1 = p2[1] - p1[1];
let (x3, y3) = (p3[0], p3[1]); // y3 - y2
let y_3_2 = p3[1] - p2[1];
// x2 - x1
let x_2_1 = p2[0] - p1[0];
// x3 - x2
let x_3_2 = p3[0] - p2[0];
// Compute the determinant d = 2 * (x1*(y2-y3) + x2*(y3-y1) + x3*(y1-y2)) // Slope of two perpendicular lines
// Visually d is twice the area of the triangle formed by the points, let slope_a = y_2_1 / x_2_1;
// also the same as: cross(p2 - p1, p3 - p1) let slope_b = y_3_2 / x_3_2;
let d = 2.0 * (x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2));
// If d is nearly zero, the points are collinear, and a unique circle cannot be defined. // Values for line intersection
if d.abs() < f64::EPSILON { // y1 - y3
return [(x1 + x2 + x3) / 3.0, (y1 + y2 + y3) / 3.0]; let y_1_3 = p1[1] - p3[1];
} // x1 + x2
let x_1_2 = p1[0] + p2[0];
// x2 + x3
let x_2_3 = p2[0] + p3[0];
// y1 + y2
let y_1_2 = p1[1] + p2[1];
// squared lengths // Solve for the intersection of these two lines
let p1_sq = x1 * x1 + y1 * y1; let numerator = (slope_a * slope_b * y_1_3) + (slope_b * x_1_2) - (slope_a * x_2_3);
let p2_sq = x2 * x2 + y2 * y2; let x = numerator / (2.0 * (slope_b - slope_a));
let p3_sq = x3 * x3 + y3 * y3;
// This formula is derived from the circle equations: let y = ((-1.0 / slope_a) * (x - (x_1_2 / 2.0))) + (y_1_2 / 2.0);
// (x - cx)^2 + (y - cy)^2 = r^2
// All 3 points will satisfy this equation, so we have 3 equations. Radius can be eliminated [x, y]
// by subtracting one of the equations from the other two and the remaining 2 equations can
// be solved for cx and cy.
[
(p1_sq * (y2 - y3) + p2_sq * (y3 - y1) + p3_sq * (y1 - y2)) / d,
(p1_sq * (x3 - x2) + p2_sq * (x1 - x3) + p3_sq * (x2 - x1)) / d,
]
} }
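
For reference, here is a minimal standalone sketch of the perpendicular-bisector construction described in the new comments above. It assumes the three points are not collinear and that neither chord is horizontal or vertical (either would make a slope zero or infinite); the older algebraic version on the left guarded the collinear case by returning the average of the points.

```rust
// Standalone sketch of the perpendicular-bisector construction described above.
// Assumes the points are not collinear and neither chord is horizontal or vertical,
// so both slopes exist, differ, and slope_a is non-zero.
fn circle_center_from_3_points(p1: [f64; 2], p2: [f64; 2], p3: [f64; 2]) -> [f64; 2] {
    // Slopes of the chords p1 -> p2 and p2 -> p3.
    let slope_a = (p2[1] - p1[1]) / (p2[0] - p1[0]);
    let slope_b = (p3[1] - p2[1]) / (p3[0] - p2[0]);

    // The perpendicular bisectors of the two chords intersect at the center.
    let x = ((slope_a * slope_b * (p1[1] - p3[1]))
        + (slope_b * (p1[0] + p2[0]))
        - (slope_a * (p2[0] + p3[0])))
        / (2.0 * (slope_b - slope_a));
    let y = ((-1.0 / slope_a) * (x - (p1[0] + p2[0]) / 2.0)) + (p1[1] + p2[1]) / 2.0;

    [x, y]
}

fn main() {
    // Three points on the unit-radius circle centered at (1, 1).
    let center = circle_center_from_3_points([0.0, 1.0], [1.0, 0.0], [2.0, 1.0]);
    assert!((center[0] - 1.0).abs() < 1e-9);
    assert!((center[1] - 1.0).abs() < 1e-9);
    println!("center = {:?}", center);
}
```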
pub struct CircleParams { pub struct CircleParams {
@ -283,11 +286,9 @@ pub fn calculate_circle_from_3_points(points: [Point2d; 3]) -> CircleParams {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
// Here you can bring your functions into scope // Here you can bring your functions into scope
use approx::assert_relative_eq;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use std::f64::consts::TAU;
use super::{calculate_circle_center, get_x_component, get_y_component, Angle}; use super::{get_x_component, get_y_component, Angle};
use crate::SourceRange; use crate::SourceRange;
static EACH_QUAD: [(i32, [i32; 2]); 12] = [ static EACH_QUAD: [(i32, [i32; 2]); 12] = [
@ -452,75 +453,6 @@ mod tests {
assert_eq!(angle_start.to_degrees().round(), 0.0); assert_eq!(angle_start.to_degrees().round(), 0.0);
assert_eq!(angle_end.to_degrees().round(), 180.0); assert_eq!(angle_end.to_degrees().round(), 180.0);
} }
#[test]
fn test_calculate_circle_center() {
const EPS: f64 = 1e-4;
// Test: circle center = (4.1, 1.9)
let p1 = [1.0, 2.0];
let p2 = [4.0, 5.0];
let p3 = [7.0, 3.0];
let center = calculate_circle_center(p1, p2, p3);
assert_relative_eq!(center[0], 4.1, epsilon = EPS);
assert_relative_eq!(center[1], 1.9, epsilon = EPS);
// Tests: Generate a few circles and test its points
let center = [3.2, 0.7];
let radius_array = [0.001, 0.01, 0.6, 1.0, 5.0, 60.0, 500.0, 2000.0, 400_000.0];
let points_array = [[0.0, 0.33, 0.66], [0.0, 0.1, 0.2], [0.0, -0.1, 0.1], [0.0, 0.5, 0.7]];
let get_point = |radius: f64, t: f64| {
let angle = t * TAU;
[center[0] + radius * angle.cos(), center[1] + radius * angle.sin()]
};
for radius in radius_array {
for point in points_array {
let p1 = get_point(radius, point[0]);
let p2 = get_point(radius, point[1]);
let p3 = get_point(radius, point[2]);
let c = calculate_circle_center(p1, p2, p3);
assert_relative_eq!(c[0], center[0], epsilon = EPS);
assert_relative_eq!(c[1], center[1], epsilon = EPS);
}
}
// Test: Equilateral triangle
let p1 = [0.0, 0.0];
let p2 = [1.0, 0.0];
let p3 = [0.5, 3.0_f64.sqrt() / 2.0];
let center = calculate_circle_center(p1, p2, p3);
assert_relative_eq!(center[0], 0.5, epsilon = EPS);
assert_relative_eq!(center[1], 1.0 / (2.0 * 3.0_f64.sqrt()), epsilon = EPS);
// Test: Collinear points (should return the average of the points)
let p1 = [0.0, 0.0];
let p2 = [1.0, 0.0];
let p3 = [2.0, 0.0];
let center = calculate_circle_center(p1, p2, p3);
assert_relative_eq!(center[0], 1.0, epsilon = EPS);
assert_relative_eq!(center[1], 0.0, epsilon = EPS);
// Test: Points forming a circle with radius = 1
let p1 = [0.0, 0.0];
let p2 = [0.0, 2.0];
let p3 = [2.0, 0.0];
let center = calculate_circle_center(p1, p2, p3);
assert_relative_eq!(center[0], 1.0, epsilon = EPS);
assert_relative_eq!(center[1], 1.0, epsilon = EPS);
// Test: Integer coordinates
let p1 = [0.0, 0.0];
let p2 = [0.0, 6.0];
let p3 = [6.0, 0.0];
let center = calculate_circle_center(p1, p2, p3);
assert_relative_eq!(center[0], 3.0, epsilon = EPS);
assert_relative_eq!(center[1], 3.0, epsilon = EPS);
// Verify radius (should be 3 * sqrt(2))
let radius = ((center[0] - p1[0]).powi(2) + (center[1] - p1[1]).powi(2)).sqrt();
assert_relative_eq!(radius, 3.0 * 2.0_f64.sqrt(), epsilon = EPS);
}
} }
pub type Coords2d = [f64; 2]; pub type Coords2d = [f64; 2];

View File

@ -1,5 +1,8 @@
use std::fmt::Write; use std::fmt::Write;
#[cfg(feature = "cli")]
use clap::ValueEnum;
use crate::parsing::{ use crate::parsing::{
ast::types::{ ast::types::{
Annotation, ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryOperator, BinaryPart, BodyItem, Annotation, ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryOperator, BinaryPart, BodyItem,
@ -192,7 +195,7 @@ impl Node<Annotation> {
result.push_str(&indentation); result.push_str(&indentation);
result.push_str(comment); result.push_str(comment);
} }
if !result.ends_with("\n\n") && result != "\n" { if !comment.ends_with("*/") && !result.ends_with("\n\n") && result != "\n" {
result.push('\n'); result.push('\n');
} }
} }
@ -864,6 +867,29 @@ impl Parameter {
} }
} }
lazy_static::lazy_static! {
pub static ref IMPORT_FILE_EXTENSIONS: Vec<String> = {
let mut import_file_extensions = vec!["stp".to_string(), "glb".to_string(), "fbxb".to_string()];
#[cfg(feature = "cli")]
let named_extensions = kittycad::types::FileImportFormat::value_variants()
.iter()
.map(|x| format!("{}", x))
.collect::<Vec<String>>();
#[cfg(not(feature = "cli"))]
let named_extensions = vec![]; // We don't really need this outside of the CLI.
// Add all the default import formats.
import_file_extensions.extend_from_slice(&named_extensions);
import_file_extensions
};
pub static ref RELEVANT_EXTENSIONS: Vec<String> = {
let mut relevant_extensions = IMPORT_FILE_EXTENSIONS.clone();
relevant_extensions.push("kcl".to_string());
relevant_extensions
};
}
/// Collect all the kcl (and other relevant) files in a directory, recursively. /// Collect all the kcl (and other relevant) files in a directory, recursively.
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
#[async_recursion::async_recursion] #[async_recursion::async_recursion]
@ -883,7 +909,7 @@ pub async fn walk_dir(dir: &std::path::PathBuf) -> Result<Vec<std::path::PathBuf
files.extend(walk_dir(&path).await?); files.extend(walk_dir(&path).await?);
} else if path } else if path
.extension() .extension()
.is_some_and(|ext| crate::RELEVANT_FILE_EXTENSIONS.contains(&ext.to_string_lossy().to_string())) .is_some_and(|ext| RELEVANT_EXTENSIONS.contains(&ext.to_string_lossy().to_string()))
{ {
files.push(path); files.push(path);
} }
@ -1022,20 +1048,6 @@ bar = 0
assert_eq!(output, input); assert_eq!(output, input);
} }
#[test]
fn recast_annotations_with_block_comment() {
let input = r#"/* Start comment
sdfsdfsdfs */
@settings(defaultLengthUnit = in)
foo = 42
"#;
let program = crate::parsing::top_level_parse(input).unwrap();
let output = program.recast(&Default::default(), 0);
assert_eq!(output, input);
}
#[test] #[test]
fn test_recast_if_else_if_same() { fn test_recast_if_else_if_same() {
let input = r#"b = if false { let input = r#"b = if false {

Binary file not shown (image snapshot: 57 KiB before → 44 KiB after).

View File

@ -85,7 +85,7 @@ description: Artifact commands circle_three_point.kcl
"path": "[uuid]", "path": "[uuid]",
"to": { "to": {
"x": 30.00594901040716, "x": 30.00594901040716,
"y": 19.75, "y": 19.749999999999996,
"z": 0.0 "z": 0.0
} }
} }
@ -109,7 +109,7 @@ description: Artifact commands circle_three_point.kcl
"x": 24.75, "x": 24.75,
"y": 19.75 "y": 19.75
}, },
"radius": 5.25594901040716, "radius": 5.255949010407163,
"start": { "start": {
"unit": "degrees", "unit": "degrees",
"value": 0.0 "value": 0.0

View File

@ -178,7 +178,7 @@ description: Artifact commands intersect_cubes.kcl
"command": { "command": {
"type": "extrude", "type": "extrude",
"target": "[uuid]", "target": "[uuid]",
"distance": 20.0, "distance": 10.0,
"faces": null, "faces": null,
"opposite": "None" "opposite": "None"
} }
@ -342,7 +342,7 @@ description: Artifact commands intersect_cubes.kcl
"type": "move_path_pen", "type": "move_path_pen",
"path": "[uuid]", "path": "[uuid]",
"to": { "to": {
"x": 2.0, "x": -2.0,
"y": -2.0, "y": -2.0,
"z": 0.0 "z": 0.0
} }
@ -364,7 +364,7 @@ description: Artifact commands intersect_cubes.kcl
"segment": { "segment": {
"type": "line", "type": "line",
"end": { "end": {
"x": 12.0, "x": 18.0,
"y": -2.0, "y": -2.0,
"z": 0.0 "z": 0.0
}, },
@ -381,8 +381,8 @@ description: Artifact commands intersect_cubes.kcl
"segment": { "segment": {
"type": "line", "type": "line",
"end": { "end": {
"x": 12.0, "x": 18.0,
"y": 8.0, "y": 18.0,
"z": 0.0 "z": 0.0
}, },
"relative": false "relative": false
@ -398,8 +398,8 @@ description: Artifact commands intersect_cubes.kcl
"segment": { "segment": {
"type": "line", "type": "line",
"end": { "end": {
"x": 2.0, "x": -2.0,
"y": 8.0, "y": 18.0,
"z": 0.0 "z": 0.0
}, },
"relative": false "relative": false
@ -544,41 +544,5 @@ description: Artifact commands intersect_cubes.kcl
"edge_id": "[uuid]", "edge_id": "[uuid]",
"face_id": "[uuid]" "face_id": "[uuid]"
} }
},
{
"cmdId": "[uuid]",
"range": [],
"command": {
"type": "set_object_transform",
"object_id": "[uuid]",
"transforms": [
{
"translate": {
"property": {
"x": 0.0,
"y": 0.0,
"z": 1.0
},
"set": false,
"is_local": true
},
"rotate_rpy": null,
"rotate_angle_axis": null,
"scale": null
}
]
}
},
{
"cmdId": "[uuid]",
"range": [],
"command": {
"type": "boolean_intersection",
"solid_ids": [
"[uuid]",
"[uuid]"
],
"tolerance": 0.0000001
}
} }
] ]

View File

@ -1,23 +1,23 @@
```mermaid ```mermaid
flowchart LR flowchart LR
subgraph path2 [Path] subgraph path2 [Path]
2["Path<br>[58, 113, 0]"] 2["Path<br>[52, 103, 0]"]
3["Segment<br>[121, 177, 0]"] 3["Segment<br>[111, 163, 0]"]
4["Segment<br>[185, 241, 0]"] 4["Segment<br>[171, 223, 0]"]
5["Segment<br>[249, 305, 0]"] 5["Segment<br>[231, 283, 0]"]
6["Segment<br>[313, 320, 0]"] 6["Segment<br>[291, 298, 0]"]
7[Solid2d] 7[Solid2d]
end end
subgraph path24 [Path] subgraph path24 [Path]
24["Path<br>[58, 113, 0]"] 24["Path<br>[52, 103, 0]"]
25["Segment<br>[121, 177, 0]"] 25["Segment<br>[111, 163, 0]"]
26["Segment<br>[185, 241, 0]"] 26["Segment<br>[171, 223, 0]"]
27["Segment<br>[249, 305, 0]"] 27["Segment<br>[231, 283, 0]"]
28["Segment<br>[313, 320, 0]"] 28["Segment<br>[291, 298, 0]"]
29[Solid2d] 29[Solid2d]
end end
1["Plane<br>[33, 50, 0]"] 1["Plane<br>[27, 44, 0]"]
8["Sweep Extrusion<br>[328, 354, 0]"] 8["Sweep Extrusion<br>[306, 326, 0]"]
9[Wall] 9[Wall]
10[Wall] 10[Wall]
11[Wall] 11[Wall]
@ -32,8 +32,8 @@ flowchart LR
20["SweepEdge Adjacent"] 20["SweepEdge Adjacent"]
21["SweepEdge Opposite"] 21["SweepEdge Opposite"]
22["SweepEdge Adjacent"] 22["SweepEdge Adjacent"]
23["Plane<br>[33, 50, 0]"] 23["Plane<br>[27, 44, 0]"]
30["Sweep Extrusion<br>[328, 354, 0]"] 30["Sweep Extrusion<br>[306, 326, 0]"]
31[Wall] 31[Wall]
32[Wall] 32[Wall]
33[Wall] 33[Wall]
@ -48,7 +48,6 @@ flowchart LR
42["SweepEdge Adjacent"] 42["SweepEdge Adjacent"]
43["SweepEdge Opposite"] 43["SweepEdge Opposite"]
44["SweepEdge Adjacent"] 44["SweepEdge Adjacent"]
45["CompositeSolid Intersect<br>[448, 477, 0]"]
1 --- 2 1 --- 2
2 --- 3 2 --- 3
2 --- 4 2 --- 4
@ -115,6 +114,4 @@ flowchart LR
30 --- 42 30 --- 42
30 --- 43 30 --- 43
30 --- 44 30 --- 44
2 <--x 45
24 <--x 45
``` ```

View File

@ -101,20 +101,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -153,20 +149,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -254,20 +246,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -306,20 +294,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -402,20 +386,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -454,20 +434,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -550,20 +526,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -602,20 +574,16 @@ description: Result of parsing intersect_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -688,38 +656,14 @@ description: Result of parsing intersect_cubes.kcl
"arg": { "arg": {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"left": { "raw": "10",
"commentStart": 0,
"end": 0,
"raw": "2",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 2.0, "value": 10.0,
"suffix": "None" "suffix": "None"
} }
},
"operator": "*",
"right": {
"abs_path": false,
"commentStart": 0,
"end": 0,
"name": {
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0,
"type": "Identifier"
},
"path": [],
"start": 0,
"type": "Name",
"type": "Name"
},
"start": 0,
"type": "BinaryExpression",
"type": "BinaryExpression"
} }
} }
], ],
@ -775,16 +719,6 @@ description: Result of parsing intersect_cubes.kcl
"start": 0, "start": 0,
"type": "Identifier" "type": "Identifier"
} }
},
{
"type": "Parameter",
"identifier": {
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0,
"type": "Identifier"
}
} }
], ],
"start": 0, "start": 0,
@ -846,18 +780,6 @@ description: Result of parsing intersect_cubes.kcl
"start": 0, "start": 0,
"type": "ArrayExpression", "type": "ArrayExpression",
"type": "ArrayExpression" "type": "ArrayExpression"
},
{
"commentStart": 0,
"end": 0,
"raw": "10",
"start": 0,
"type": "Literal",
"type": "Literal",
"value": {
"value": 10.0,
"suffix": "None"
}
} }
], ],
"callee": { "callee": {
@ -903,8 +825,6 @@ description: Result of parsing intersect_cubes.kcl
"type": "Identifier" "type": "Identifier"
}, },
"init": { "init": {
"body": [
{
"arguments": [ "arguments": [
{ {
"commentStart": 0, "commentStart": 0,
@ -912,24 +832,24 @@ description: Result of parsing intersect_cubes.kcl
{ {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "7", "raw": "8",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 7.0, "value": 8.0,
"suffix": "None" "suffix": "None"
} }
}, },
{ {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "3", "raw": "8",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 3.0, "value": 8.0,
"suffix": "None" "suffix": "None"
} }
} }
@ -938,18 +858,6 @@ description: Result of parsing intersect_cubes.kcl
"start": 0, "start": 0,
"type": "ArrayExpression", "type": "ArrayExpression",
"type": "ArrayExpression" "type": "ArrayExpression"
},
{
"commentStart": 0,
"end": 0,
"raw": "5",
"start": 0,
"type": "Literal",
"type": "Literal",
"value": {
"value": 5.0,
"suffix": "None"
}
} }
], ],
"callee": { "callee": {
@ -973,60 +881,6 @@ description: Result of parsing intersect_cubes.kcl
"type": "CallExpression", "type": "CallExpression",
"type": "CallExpression" "type": "CallExpression"
}, },
{
"arguments": [
{
"type": "LabeledArg",
"label": {
"commentStart": 0,
"end": 0,
"name": "z",
"start": 0,
"type": "Identifier"
},
"arg": {
"commentStart": 0,
"end": 0,
"raw": "1",
"start": 0,
"type": "Literal",
"type": "Literal",
"value": {
"value": 1.0,
"suffix": "None"
}
}
}
],
"callee": {
"abs_path": false,
"commentStart": 0,
"end": 0,
"name": {
"commentStart": 0,
"end": 0,
"name": "translate",
"start": 0,
"type": "Identifier"
},
"path": [],
"start": 0,
"type": "Name"
},
"commentStart": 0,
"end": 0,
"start": 0,
"type": "CallExpressionKw",
"type": "CallExpressionKw",
"unlabeled": null
}
],
"commentStart": 0,
"end": 0,
"start": 0,
"type": "PipeExpression",
"type": "PipeExpression"
},
"start": 0, "start": 0,
"type": "VariableDeclarator" "type": "VariableDeclarator"
}, },

View File

@ -1,15 +1,14 @@
fn cube(center, size) { fn cube(center) {
return startSketchOn(XY) return startSketchOn(XY)
|> startProfileAt([center[0] - size, center[1] - size], %) |> startProfileAt([center[0] - 10, center[1] - 10], %)
|> line(endAbsolute = [center[0] + size, center[1] - size]) |> line(endAbsolute = [center[0] + 10, center[1] - 10])
|> line(endAbsolute = [center[0] + size, center[1] + size]) |> line(endAbsolute = [center[0] + 10, center[1] + 10])
|> line(endAbsolute = [center[0] - size, center[1] + size]) |> line(endAbsolute = [center[0] - 10, center[1] + 10])
|> close() |> close()
|> extrude(length = 2 * size) |> extrude(length = 10)
} }
part001 = cube([0, 0], 10) part001 = cube([0, 0])
part002 = cube([7, 3], 5) part002 = cube([8, 8])
|> translate(z = 1)
fullPart = intersect([part001, part002]) fullPart = intersect([part001, part002])

View File

@ -10,7 +10,7 @@ description: Operations executed intersect_cubes.kcl
"name": "cube", "name": "cube",
"functionSourceRange": [ "functionSourceRange": [
7, 7,
356, 328,
0 0
], ],
"unlabeledArg": null, "unlabeledArg": null,
@ -38,7 +38,7 @@ description: Operations executed intersect_cubes.kcl
"length": { "length": {
"value": { "value": {
"type": "Number", "type": "Number",
"value": 20.0, "value": 10.0,
"ty": { "ty": {
"type": "Default", "type": "Default",
"len": { "len": {
@ -75,7 +75,7 @@ description: Operations executed intersect_cubes.kcl
"name": "cube", "name": "cube",
"functionSourceRange": [ "functionSourceRange": [
7, 7,
356, 328,
0 0
], ],
"unlabeledArg": null, "unlabeledArg": null,

View File

@ -7,9 +7,6 @@ description: Variables in memory after executing intersect_cubes.kcl
"type": "Function" "type": "Function"
}, },
"fullPart": { "fullPart": {
"type": "HomArray",
"value": [
{
"type": "Solid", "type": "Solid",
"value": { "value": {
"type": "Solid", "type": "Solid",
@ -179,7 +176,7 @@ description: Variables in memory after executing intersect_cubes.kcl
"type": "Mm" "type": "Mm"
} }
}, },
"height": 20.0, "height": 10.0,
"startCapId": "[uuid]", "startCapId": "[uuid]",
"endCapId": "[uuid]", "endCapId": "[uuid]",
"units": { "units": {
@ -187,186 +184,6 @@ description: Variables in memory after executing intersect_cubes.kcl
} }
} }
}, },
{
"type": "Solid",
"value": {
"type": "Solid",
"id": "[uuid]",
"artifactId": "[uuid]",
"value": [
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
}
],
"sketch": {
"type": "Sketch",
"id": "[uuid]",
"paths": [
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
-10.0,
-10.0
],
"tag": null,
"to": [
10.0,
-10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
10.0,
-10.0
],
"tag": null,
"to": [
10.0,
10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
10.0,
10.0
],
"tag": null,
"to": [
-10.0,
10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
-10.0,
10.0
],
"tag": null,
"to": [
-10.0,
-10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
}
],
"on": {
"type": "plane",
"id": "[uuid]",
"artifactId": "[uuid]",
"value": "XY",
"origin": {
"x": 0.0,
"y": 0.0,
"z": 0.0
},
"xAxis": {
"x": 1.0,
"y": 0.0,
"z": 0.0
},
"yAxis": {
"x": 0.0,
"y": 1.0,
"z": 0.0
},
"zAxis": {
"x": 0.0,
"y": 0.0,
"z": 1.0
},
"units": {
"type": "Mm"
}
},
"start": {
"from": [
-10.0,
-10.0
],
"to": [
-10.0,
-10.0
],
"units": {
"type": "Mm"
},
"tag": null,
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
}
},
"artifactId": "[uuid]",
"originalId": "[uuid]",
"units": {
"type": "Mm"
}
},
"height": 20.0,
"startCapId": "[uuid]",
"endCapId": "[uuid]",
"units": {
"type": "Mm"
}
}
}
]
},
"part001": { "part001": {
"type": "Solid", "type": "Solid",
"value": { "value": {
@ -537,7 +354,7 @@ description: Variables in memory after executing intersect_cubes.kcl
"type": "Mm" "type": "Mm"
} }
}, },
"height": 20.0, "height": 10.0,
"startCapId": "[uuid]", "startCapId": "[uuid]",
"endCapId": "[uuid]", "endCapId": "[uuid]",
"units": { "units": {
@ -591,12 +408,12 @@ description: Variables in memory after executing intersect_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
2.0, -2.0,
-2.0 -2.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
12.0, 18.0,
-2.0 -2.0
], ],
"type": "ToPoint", "type": "ToPoint",
@ -610,13 +427,13 @@ description: Variables in memory after executing intersect_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
12.0, 18.0,
-2.0 -2.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
12.0, 18.0,
8.0 18.0
], ],
"type": "ToPoint", "type": "ToPoint",
"units": { "units": {
@ -629,13 +446,13 @@ description: Variables in memory after executing intersect_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
12.0, 18.0,
8.0 18.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
2.0, -2.0,
8.0 18.0
], ],
"type": "ToPoint", "type": "ToPoint",
"units": { "units": {
@ -648,12 +465,12 @@ description: Variables in memory after executing intersect_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
2.0, -2.0,
8.0 18.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
2.0, -2.0,
-2.0 -2.0
], ],
"type": "ToPoint", "type": "ToPoint",
@ -693,11 +510,11 @@ description: Variables in memory after executing intersect_cubes.kcl
}, },
"start": { "start": {
"from": [ "from": [
2.0, -2.0,
-2.0 -2.0
], ],
"to": [ "to": [
2.0, -2.0,
-2.0 -2.0
], ],
"units": { "units": {

Binary file not shown (image snapshot: 61 KiB before → 67 KiB after).

View File

@ -2,18 +2,17 @@
source: kcl-lib/src/simulation_tests.rs source: kcl-lib/src/simulation_tests.rs
description: Result of unparsing intersect_cubes.kcl description: Result of unparsing intersect_cubes.kcl
--- ---
fn cube(center, size) { fn cube(center) {
return startSketchOn(XY) return startSketchOn(XY)
|> startProfileAt([center[0] - size, center[1] - size], %) |> startProfileAt([center[0] - 10, center[1] - 10], %)
|> line(endAbsolute = [center[0] + size, center[1] - size]) |> line(endAbsolute = [center[0] + 10, center[1] - 10])
|> line(endAbsolute = [center[0] + size, center[1] + size]) |> line(endAbsolute = [center[0] + 10, center[1] + 10])
|> line(endAbsolute = [center[0] - size, center[1] + size]) |> line(endAbsolute = [center[0] - 10, center[1] + 10])
|> close() |> close()
|> extrude(length = 2 * size) |> extrude(length = 10)
} }
part001 = cube([0, 0], 10) part001 = cube([0, 0])
part002 = cube([7, 3], 5) part002 = cube([8, 8])
|> translate(z = 1)
fullPart = intersect([part001, part002]) fullPart = intersect([part001, part002])

Binary file not shown (image snapshot: 58 KiB before → 67 KiB after).

Binary file not shown (image snapshot: 58 KiB before → 67 KiB after).

Binary file not shown (image snapshot: 59 KiB before → 70 KiB after).

Binary file not shown (image snapshot: 59 KiB before → 70 KiB after).

Binary file not shown (image snapshot: 61 KiB before → 62 KiB after).

Binary file not shown (image snapshot: 61 KiB before → 62 KiB after).

Binary file not shown (image snapshot: 61 KiB before → 62 KiB after).

View File

@ -342,8 +342,8 @@ description: Artifact commands subtract_cylinder_from_cube.kcl
"type": "move_path_pen", "type": "move_path_pen",
"path": "[uuid]", "path": "[uuid]",
"to": { "to": {
"x": 4.0, "x": 2.0,
"y": 2.0, "y": 0.0,
"z": 0.0 "z": 0.0
} }
} }
@ -364,8 +364,8 @@ description: Artifact commands subtract_cylinder_from_cube.kcl
"segment": { "segment": {
"type": "arc", "type": "arc",
"center": { "center": {
"x": 2.0, "x": 0.0,
"y": 2.0 "y": 0.0
}, },
"radius": 2.0, "radius": 2.0,
"start": { "start": {
@ -410,7 +410,7 @@ description: Artifact commands subtract_cylinder_from_cube.kcl
"command": { "command": {
"type": "extrude", "type": "extrude",
"target": "[uuid]", "target": "[uuid]",
"distance": 5.0, "distance": 10.0,
"faces": null, "faces": null,
"opposite": "None" "opposite": "None"
} }
@ -458,19 +458,5 @@ description: Artifact commands subtract_cylinder_from_cube.kcl
"edge_id": "[uuid]", "edge_id": "[uuid]",
"face_id": "[uuid]" "face_id": "[uuid]"
} }
},
{
"cmdId": "[uuid]",
"range": [],
"command": {
"type": "boolean_subtract",
"target_ids": [
"[uuid]"
],
"tool_ids": [
"[uuid]"
],
"tolerance": 0.0000001
}
} }
] ]

View File

@ -30,13 +30,12 @@ flowchart LR
21["SweepEdge Opposite"] 21["SweepEdge Opposite"]
22["SweepEdge Adjacent"] 22["SweepEdge Adjacent"]
23["Plane<br>[363, 382, 0]"] 23["Plane<br>[363, 382, 0]"]
27["Sweep Extrusion<br>[429, 448, 0]"] 27["Sweep Extrusion<br>[429, 449, 0]"]
28[Wall] 28[Wall]
29["Cap Start"] 29["Cap Start"]
30["Cap End"] 30["Cap End"]
31["SweepEdge Opposite"] 31["SweepEdge Opposite"]
32["SweepEdge Adjacent"] 32["SweepEdge Adjacent"]
33["CompositeSolid Subtract<br>[461, 497, 0]"]
1 --- 2 1 --- 2
2 --- 3 2 --- 3
2 --- 4 2 --- 4
@ -82,6 +81,4 @@ flowchart LR
27 --- 30 27 --- 30
27 --- 31 27 --- 31
27 --- 32 27 --- 32
2 <--x 33
24 <--x 33
``` ```

View File

@ -876,24 +876,24 @@ description: Result of parsing subtract_cylinder_from_cube.kcl
{ {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "2", "raw": "0",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 2.0, "value": 0.0,
"suffix": "None" "suffix": "None"
} }
}, },
{ {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "2", "raw": "0",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 2.0, "value": 0.0,
"suffix": "None" "suffix": "None"
} }
} }
@ -963,12 +963,12 @@ description: Result of parsing subtract_cylinder_from_cube.kcl
"arg": { "arg": {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "5", "raw": "10",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 5.0, "value": 10.0,
"suffix": "None" "suffix": "None"
} }
} }

View File

@ -10,7 +10,7 @@ fn cube(center) {
part001 = cube([0, 0]) part001 = cube([0, 0])
part002 = startSketchOn('XY') part002 = startSketchOn('XY')
|> circle(center = [2, 2], radius = 2) |> circle(center = [0, 0], radius = 2)
|> extrude(length = 5) |> extrude(length = 10)
fullPart = subtract([part001], tools=[part002]) fullPart = subtract([part001], tools=[part002])

View File

@ -88,7 +88,7 @@ description: Operations executed subtract_cylinder_from_cube.kcl
"length": { "length": {
"value": { "value": {
"type": "Number", "type": "Number",
"value": 5.0, "value": 10.0,
"ty": { "ty": {
"type": "Default", "type": "Default",
"len": { "len": {

View File

@ -7,9 +7,6 @@ description: Variables in memory after executing subtract_cylinder_from_cube.kcl
"type": "Function" "type": "Function"
}, },
"fullPart": { "fullPart": {
"type": "HomArray",
"value": [
{
"type": "Solid", "type": "Solid",
"value": { "value": {
"type": "Solid", "type": "Solid",
@ -187,186 +184,6 @@ description: Variables in memory after executing subtract_cylinder_from_cube.kcl
} }
} }
}, },
{
"type": "Solid",
"value": {
"type": "Solid",
"id": "[uuid]",
"artifactId": "[uuid]",
"value": [
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
}
],
"sketch": {
"type": "Sketch",
"id": "[uuid]",
"paths": [
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
-10.0,
-10.0
],
"tag": null,
"to": [
10.0,
-10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
10.0,
-10.0
],
"tag": null,
"to": [
10.0,
10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
10.0,
10.0
],
"tag": null,
"to": [
-10.0,
10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
-10.0,
10.0
],
"tag": null,
"to": [
-10.0,
-10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
}
],
"on": {
"type": "plane",
"id": "[uuid]",
"artifactId": "[uuid]",
"value": "XY",
"origin": {
"x": 0.0,
"y": 0.0,
"z": 0.0
},
"xAxis": {
"x": 1.0,
"y": 0.0,
"z": 0.0
},
"yAxis": {
"x": 0.0,
"y": 1.0,
"z": 0.0
},
"zAxis": {
"x": 0.0,
"y": 0.0,
"z": 1.0
},
"units": {
"type": "Mm"
}
},
"start": {
"from": [
-10.0,
-10.0
],
"to": [
-10.0,
-10.0
],
"units": {
"type": "Mm"
},
"tag": null,
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
}
},
"artifactId": "[uuid]",
"originalId": "[uuid]",
"units": {
"type": "Mm"
}
},
"height": 10.0,
"startCapId": "[uuid]",
"endCapId": "[uuid]",
"units": {
"type": "Mm"
}
}
}
]
},
"part001": { "part001": {
"type": "Solid", "type": "Solid",
"value": { "value": {
@ -571,18 +388,18 @@ description: Variables in memory after executing subtract_cylinder_from_cube.kcl
}, },
"ccw": true, "ccw": true,
"center": [ "center": [
2.0, 0.0,
2.0 0.0
], ],
"from": [ "from": [
4.0, 2.0,
2.0 0.0
], ],
"radius": 2.0, "radius": 2.0,
"tag": null, "tag": null,
"to": [ "to": [
4.0, 2.0,
2.0 0.0
], ],
"type": "Circle", "type": "Circle",
"units": { "units": {
@ -621,12 +438,12 @@ description: Variables in memory after executing subtract_cylinder_from_cube.kcl
}, },
"start": { "start": {
"from": [ "from": [
4.0, 2.0,
2.0 0.0
], ],
"to": [ "to": [
4.0, 2.0,
2.0 0.0
], ],
"units": { "units": {
"type": "Mm" "type": "Mm"
@ -643,7 +460,7 @@ description: Variables in memory after executing subtract_cylinder_from_cube.kcl
"type": "Mm" "type": "Mm"
} }
}, },
"height": 5.0, "height": 10.0,
"startCapId": "[uuid]", "startCapId": "[uuid]",
"endCapId": "[uuid]", "endCapId": "[uuid]",
"units": { "units": {

Binary file not shown (image snapshot: 70 KiB before → 70 KiB after).

View File

@ -14,7 +14,7 @@ fn cube(center) {
part001 = cube([0, 0]) part001 = cube([0, 0])
part002 = startSketchOn(XY) part002 = startSketchOn(XY)
|> circle(center = [2, 2], radius = 2) |> circle(center = [0, 0], radius = 2)
|> extrude(length = 5) |> extrude(length = 10)
fullPart = subtract([part001], tools = [part002]) fullPart = subtract([part001], tools = [part002])

View File

@ -178,7 +178,7 @@ description: Artifact commands union_cubes.kcl
"command": { "command": {
"type": "extrude", "type": "extrude",
"target": "[uuid]", "target": "[uuid]",
"distance": 20.0, "distance": 10.0,
"faces": null, "faces": null,
"opposite": "None" "opposite": "None"
} }
@ -342,8 +342,8 @@ description: Artifact commands union_cubes.kcl
"type": "move_path_pen", "type": "move_path_pen",
"path": "[uuid]", "path": "[uuid]",
"to": { "to": {
"x": 2.0, "x": 10.0,
"y": -2.0, "y": 0.0,
"z": 0.0 "z": 0.0
} }
} }
@ -364,8 +364,8 @@ description: Artifact commands union_cubes.kcl
"segment": { "segment": {
"type": "line", "type": "line",
"end": { "end": {
"x": 12.0, "x": 30.0,
"y": -2.0, "y": 0.0,
"z": 0.0 "z": 0.0
}, },
"relative": false "relative": false
@ -381,8 +381,8 @@ description: Artifact commands union_cubes.kcl
"segment": { "segment": {
"type": "line", "type": "line",
"end": { "end": {
"x": 12.0, "x": 30.0,
"y": 8.0, "y": 20.0,
"z": 0.0 "z": 0.0
}, },
"relative": false "relative": false
@ -398,8 +398,8 @@ description: Artifact commands union_cubes.kcl
"segment": { "segment": {
"type": "line", "type": "line",
"end": { "end": {
"x": 2.0, "x": 10.0,
"y": 8.0, "y": 20.0,
"z": 0.0 "z": 0.0
}, },
"relative": false "relative": false
@ -544,41 +544,5 @@ description: Artifact commands union_cubes.kcl
"edge_id": "[uuid]", "edge_id": "[uuid]",
"face_id": "[uuid]" "face_id": "[uuid]"
} }
},
{
"cmdId": "[uuid]",
"range": [],
"command": {
"type": "set_object_transform",
"object_id": "[uuid]",
"transforms": [
{
"translate": {
"property": {
"x": 0.0,
"y": 0.0,
"z": 1.0
},
"set": false,
"is_local": true
},
"rotate_rpy": null,
"rotate_angle_axis": null,
"scale": null
}
]
}
},
{
"cmdId": "[uuid]",
"range": [],
"command": {
"type": "boolean_union",
"solid_ids": [
"[uuid]",
"[uuid]"
],
"tolerance": 0.0000001
}
} }
] ]

View File

@ -1,23 +1,23 @@
```mermaid ```mermaid
flowchart LR flowchart LR
subgraph path2 [Path] subgraph path2 [Path]
2["Path<br>[58, 113, 0]"] 2["Path<br>[52, 103, 0]"]
3["Segment<br>[121, 177, 0]"] 3["Segment<br>[111, 163, 0]"]
4["Segment<br>[185, 241, 0]"] 4["Segment<br>[171, 223, 0]"]
5["Segment<br>[249, 305, 0]"] 5["Segment<br>[231, 283, 0]"]
6["Segment<br>[313, 320, 0]"] 6["Segment<br>[291, 298, 0]"]
7[Solid2d] 7[Solid2d]
end end
subgraph path24 [Path] subgraph path24 [Path]
24["Path<br>[58, 113, 0]"] 24["Path<br>[52, 103, 0]"]
25["Segment<br>[121, 177, 0]"] 25["Segment<br>[111, 163, 0]"]
26["Segment<br>[185, 241, 0]"] 26["Segment<br>[171, 223, 0]"]
27["Segment<br>[249, 305, 0]"] 27["Segment<br>[231, 283, 0]"]
28["Segment<br>[313, 320, 0]"] 28["Segment<br>[291, 298, 0]"]
29[Solid2d] 29[Solid2d]
end end
1["Plane<br>[33, 50, 0]"] 1["Plane<br>[27, 44, 0]"]
8["Sweep Extrusion<br>[328, 354, 0]"] 8["Sweep Extrusion<br>[306, 326, 0]"]
9[Wall] 9[Wall]
10[Wall] 10[Wall]
11[Wall] 11[Wall]
@ -32,8 +32,8 @@ flowchart LR
20["SweepEdge Adjacent"] 20["SweepEdge Adjacent"]
21["SweepEdge Opposite"] 21["SweepEdge Opposite"]
22["SweepEdge Adjacent"] 22["SweepEdge Adjacent"]
23["Plane<br>[33, 50, 0]"] 23["Plane<br>[27, 44, 0]"]
30["Sweep Extrusion<br>[328, 354, 0]"] 30["Sweep Extrusion<br>[306, 326, 0]"]
31[Wall] 31[Wall]
32[Wall] 32[Wall]
33[Wall] 33[Wall]
@ -48,7 +48,6 @@ flowchart LR
42["SweepEdge Adjacent"] 42["SweepEdge Adjacent"]
43["SweepEdge Opposite"] 43["SweepEdge Opposite"]
44["SweepEdge Adjacent"] 44["SweepEdge Adjacent"]
45["CompositeSolid Union<br>[448, 473, 0]"]
1 --- 2 1 --- 2
2 --- 3 2 --- 3
2 --- 4 2 --- 4
@ -115,6 +114,4 @@ flowchart LR
30 --- 42 30 --- 42
30 --- 43 30 --- 43
30 --- 44 30 --- 44
2 <--x 45
24 <--x 45
``` ```

View File

@ -101,20 +101,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -153,20 +149,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -254,20 +246,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -306,20 +294,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -402,20 +386,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -454,20 +434,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -550,20 +526,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "-", "operator": "-",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -602,20 +574,16 @@ description: Result of parsing union_cubes.kcl
}, },
"operator": "+", "operator": "+",
"right": { "right": {
"abs_path": false,
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"name": { "raw": "10",
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0, "start": 0,
"type": "Identifier" "type": "Literal",
}, "type": "Literal",
"path": [], "value": {
"start": 0, "value": 10.0,
"type": "Name", "suffix": "None"
"type": "Name" }
}, },
"start": 0, "start": 0,
"type": "BinaryExpression", "type": "BinaryExpression",
@ -688,38 +656,14 @@ description: Result of parsing union_cubes.kcl
"arg": { "arg": {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"left": { "raw": "10",
"commentStart": 0,
"end": 0,
"raw": "2",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 2.0, "value": 10.0,
"suffix": "None" "suffix": "None"
} }
},
"operator": "*",
"right": {
"abs_path": false,
"commentStart": 0,
"end": 0,
"name": {
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0,
"type": "Identifier"
},
"path": [],
"start": 0,
"type": "Name",
"type": "Name"
},
"start": 0,
"type": "BinaryExpression",
"type": "BinaryExpression"
} }
} }
], ],
@ -775,16 +719,6 @@ description: Result of parsing union_cubes.kcl
"start": 0, "start": 0,
"type": "Identifier" "type": "Identifier"
} }
},
{
"type": "Parameter",
"identifier": {
"commentStart": 0,
"end": 0,
"name": "size",
"start": 0,
"type": "Identifier"
}
} }
], ],
"start": 0, "start": 0,
@ -846,18 +780,6 @@ description: Result of parsing union_cubes.kcl
"start": 0, "start": 0,
"type": "ArrayExpression", "type": "ArrayExpression",
"type": "ArrayExpression" "type": "ArrayExpression"
},
{
"commentStart": 0,
"end": 0,
"raw": "10",
"start": 0,
"type": "Literal",
"type": "Literal",
"value": {
"value": 10.0,
"suffix": "None"
}
} }
], ],
"callee": { "callee": {
@ -903,8 +825,6 @@ description: Result of parsing union_cubes.kcl
"type": "Identifier" "type": "Identifier"
}, },
"init": { "init": {
"body": [
{
"arguments": [ "arguments": [
{ {
"commentStart": 0, "commentStart": 0,
@ -912,24 +832,24 @@ description: Result of parsing union_cubes.kcl
{ {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "7", "raw": "20",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 7.0, "value": 20.0,
"suffix": "None" "suffix": "None"
} }
}, },
{ {
"commentStart": 0, "commentStart": 0,
"end": 0, "end": 0,
"raw": "3", "raw": "10",
"start": 0, "start": 0,
"type": "Literal", "type": "Literal",
"type": "Literal", "type": "Literal",
"value": { "value": {
"value": 3.0, "value": 10.0,
"suffix": "None" "suffix": "None"
} }
} }
@ -938,18 +858,6 @@ description: Result of parsing union_cubes.kcl
"start": 0, "start": 0,
"type": "ArrayExpression", "type": "ArrayExpression",
"type": "ArrayExpression" "type": "ArrayExpression"
},
{
"commentStart": 0,
"end": 0,
"raw": "5",
"start": 0,
"type": "Literal",
"type": "Literal",
"value": {
"value": 5.0,
"suffix": "None"
}
} }
], ],
"callee": { "callee": {
@ -973,60 +881,6 @@ description: Result of parsing union_cubes.kcl
"type": "CallExpression", "type": "CallExpression",
"type": "CallExpression" "type": "CallExpression"
}, },
{
"arguments": [
{
"type": "LabeledArg",
"label": {
"commentStart": 0,
"end": 0,
"name": "z",
"start": 0,
"type": "Identifier"
},
"arg": {
"commentStart": 0,
"end": 0,
"raw": "1",
"start": 0,
"type": "Literal",
"type": "Literal",
"value": {
"value": 1.0,
"suffix": "None"
}
}
}
],
"callee": {
"abs_path": false,
"commentStart": 0,
"end": 0,
"name": {
"commentStart": 0,
"end": 0,
"name": "translate",
"start": 0,
"type": "Identifier"
},
"path": [],
"start": 0,
"type": "Name"
},
"commentStart": 0,
"end": 0,
"start": 0,
"type": "CallExpressionKw",
"type": "CallExpressionKw",
"unlabeled": null
}
],
"commentStart": 0,
"end": 0,
"start": 0,
"type": "PipeExpression",
"type": "PipeExpression"
},
"start": 0, "start": 0,
"type": "VariableDeclarator" "type": "VariableDeclarator"
}, },

View File

@ -1,15 +1,14 @@
fn cube(center, size) { fn cube(center) {
return startSketchOn(XY) return startSketchOn(XY)
|> startProfileAt([center[0] - size, center[1] - size], %) |> startProfileAt([center[0] - 10, center[1] - 10], %)
|> line(endAbsolute = [center[0] + size, center[1] - size]) |> line(endAbsolute = [center[0] + 10, center[1] - 10])
|> line(endAbsolute = [center[0] + size, center[1] + size]) |> line(endAbsolute = [center[0] + 10, center[1] + 10])
|> line(endAbsolute = [center[0] - size, center[1] + size]) |> line(endAbsolute = [center[0] - 10, center[1] + 10])
|> close() |> close()
|> extrude(length = 2 * size) |> extrude(length = 10)
} }
part001 = cube([0, 0], 10) part001 = cube([0, 0])
part002 = cube([7, 3], 5) part002 = cube([20, 10])
|> translate(z = 1)
fullPart = union([part001, part002]) fullPart = union([part001, part002])

View File

@ -10,7 +10,7 @@ description: Operations executed union_cubes.kcl
"name": "cube", "name": "cube",
"functionSourceRange": [ "functionSourceRange": [
7, 7,
356, 328,
0 0
], ],
"unlabeledArg": null, "unlabeledArg": null,
@ -38,7 +38,7 @@ description: Operations executed union_cubes.kcl
"length": { "length": {
"value": { "value": {
"type": "Number", "type": "Number",
"value": 20.0, "value": 10.0,
"ty": { "ty": {
"type": "Default", "type": "Default",
"len": { "len": {
@ -75,7 +75,7 @@ description: Operations executed union_cubes.kcl
"name": "cube", "name": "cube",
"functionSourceRange": [ "functionSourceRange": [
7, 7,
356, 328,
0 0
], ],
"unlabeledArg": null, "unlabeledArg": null,

View File

@ -7,9 +7,6 @@ description: Variables in memory after executing union_cubes.kcl
"type": "Function" "type": "Function"
}, },
"fullPart": { "fullPart": {
"type": "HomArray",
"value": [
{
"type": "Solid", "type": "Solid",
"value": { "value": {
"type": "Solid", "type": "Solid",
@ -179,7 +176,7 @@ description: Variables in memory after executing union_cubes.kcl
"type": "Mm" "type": "Mm"
} }
}, },
"height": 20.0, "height": 10.0,
"startCapId": "[uuid]", "startCapId": "[uuid]",
"endCapId": "[uuid]", "endCapId": "[uuid]",
"units": { "units": {
@ -187,186 +184,6 @@ description: Variables in memory after executing union_cubes.kcl
} }
} }
}, },
{
"type": "Solid",
"value": {
"type": "Solid",
"id": "[uuid]",
"artifactId": "[uuid]",
"value": [
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
},
{
"faceId": "[uuid]",
"id": "[uuid]",
"sourceRange": [],
"tag": null,
"type": "extrudePlane"
}
],
"sketch": {
"type": "Sketch",
"id": "[uuid]",
"paths": [
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
-10.0,
-10.0
],
"tag": null,
"to": [
10.0,
-10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
10.0,
-10.0
],
"tag": null,
"to": [
10.0,
10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
10.0,
10.0
],
"tag": null,
"to": [
-10.0,
10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
},
{
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
},
"from": [
-10.0,
10.0
],
"tag": null,
"to": [
-10.0,
-10.0
],
"type": "ToPoint",
"units": {
"type": "Mm"
}
}
],
"on": {
"type": "plane",
"id": "[uuid]",
"artifactId": "[uuid]",
"value": "XY",
"origin": {
"x": 0.0,
"y": 0.0,
"z": 0.0
},
"xAxis": {
"x": 1.0,
"y": 0.0,
"z": 0.0
},
"yAxis": {
"x": 0.0,
"y": 1.0,
"z": 0.0
},
"zAxis": {
"x": 0.0,
"y": 0.0,
"z": 1.0
},
"units": {
"type": "Mm"
}
},
"start": {
"from": [
-10.0,
-10.0
],
"to": [
-10.0,
-10.0
],
"units": {
"type": "Mm"
},
"tag": null,
"__geoMeta": {
"id": "[uuid]",
"sourceRange": []
}
},
"artifactId": "[uuid]",
"originalId": "[uuid]",
"units": {
"type": "Mm"
}
},
"height": 20.0,
"startCapId": "[uuid]",
"endCapId": "[uuid]",
"units": {
"type": "Mm"
}
}
}
]
},
"part001": { "part001": {
"type": "Solid", "type": "Solid",
"value": { "value": {
@ -537,7 +354,7 @@ description: Variables in memory after executing union_cubes.kcl
"type": "Mm" "type": "Mm"
} }
}, },
"height": 20.0, "height": 10.0,
"startCapId": "[uuid]", "startCapId": "[uuid]",
"endCapId": "[uuid]", "endCapId": "[uuid]",
"units": { "units": {
@ -591,13 +408,13 @@ description: Variables in memory after executing union_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
2.0, 10.0,
-2.0 0.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
12.0, 30.0,
-2.0 0.0
], ],
"type": "ToPoint", "type": "ToPoint",
"units": { "units": {
@ -610,13 +427,13 @@ description: Variables in memory after executing union_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
12.0, 30.0,
-2.0 0.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
12.0, 30.0,
8.0 20.0
], ],
"type": "ToPoint", "type": "ToPoint",
"units": { "units": {
@ -629,13 +446,13 @@ description: Variables in memory after executing union_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
12.0, 30.0,
8.0 20.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
2.0, 10.0,
8.0 20.0
], ],
"type": "ToPoint", "type": "ToPoint",
"units": { "units": {
@ -648,13 +465,13 @@ description: Variables in memory after executing union_cubes.kcl
"sourceRange": [] "sourceRange": []
}, },
"from": [ "from": [
2.0, 10.0,
8.0 20.0
], ],
"tag": null, "tag": null,
"to": [ "to": [
2.0, 10.0,
-2.0 0.0
], ],
"type": "ToPoint", "type": "ToPoint",
"units": { "units": {
@ -693,12 +510,12 @@ description: Variables in memory after executing union_cubes.kcl
}, },
"start": { "start": {
"from": [ "from": [
2.0, 10.0,
-2.0 0.0
], ],
"to": [ "to": [
2.0, 10.0,
-2.0 0.0
], ],
"units": { "units": {
"type": "Mm" "type": "Mm"

Binary file not shown (image snapshot: 71 KiB before → 62 KiB after).

View File

@ -2,18 +2,17 @@
source: kcl-lib/src/simulation_tests.rs source: kcl-lib/src/simulation_tests.rs
description: Result of unparsing union_cubes.kcl description: Result of unparsing union_cubes.kcl
--- ---
fn cube(center, size) { fn cube(center) {
return startSketchOn(XY) return startSketchOn(XY)
|> startProfileAt([center[0] - size, center[1] - size], %) |> startProfileAt([center[0] - 10, center[1] - 10], %)
|> line(endAbsolute = [center[0] + size, center[1] - size]) |> line(endAbsolute = [center[0] + 10, center[1] - 10])
|> line(endAbsolute = [center[0] + size, center[1] + size]) |> line(endAbsolute = [center[0] + 10, center[1] + 10])
|> line(endAbsolute = [center[0] - size, center[1] + size]) |> line(endAbsolute = [center[0] - 10, center[1] + 10])
|> close() |> close()
|> extrude(length = 2 * size) |> extrude(length = 10)
} }
part001 = cube([0, 0], 10) part001 = cube([0, 0])
part002 = cube([7, 3], 5) part002 = cube([20, 10])
|> translate(z = 1)
fullPart = union([part001, part002]) fullPart = union([part001, part002])

View File

@ -1,6 +1,6 @@
[package] [package]
name = "kcl-python-bindings" name = "kcl-python-bindings"
version = "0.3.60" version = "0.3.58"
edition = "2021" edition = "2021"
repository = "https://github.com/kittycad/modeling-app" repository = "https://github.com/kittycad/modeling-app"
exclude = ["tests/*", "files/*", "venv/*"] exclude = ["tests/*", "files/*", "venv/*"]

View File

@ -1,7 +1,7 @@
[package] [package]
name = "kcl-test-server" name = "kcl-test-server"
description = "A test server for KCL" description = "A test server for KCL"
version = "0.1.60" version = "0.1.58"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"

View File

@ -1,7 +1,7 @@
[package] [package]
name = "kcl-to-core" name = "kcl-to-core"
description = "Utility methods to convert kcl to engine core executable tests" description = "Utility methods to convert kcl to engine core executable tests"
version = "0.1.60" version = "0.1.58"
edition = "2021" edition = "2021"
license = "MIT" license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app" repository = "https://github.com/KittyCAD/modeling-app"

View File

@ -1,6 +1,6 @@
[package] [package]
name = "kcl-wasm-lib" name = "kcl-wasm-lib"
version = "0.1.60" version = "0.1.58"
edition = "2021" edition = "2021"
repository = "https://github.com/KittyCAD/modeling-app" repository = "https://github.com/KittyCAD/modeling-app"
rust-version = "1.83" rust-version = "1.83"

View File

@ -292,22 +292,3 @@ pub fn get_kcl_version() -> String {
kcl_lib::version().to_string() kcl_lib::version().to_string()
} }
/// Get the allowed import file extensions.
#[wasm_bindgen]
pub fn import_file_extensions() -> Result<Vec<String>, String> {
console_error_panic_hook::set_once();
Ok(kcl_lib::IMPORT_FILE_EXTENSIONS.iter().map(|s| s.to_string()).collect())
}
/// Get the allowed relevant file extensions (imports + kcl).
#[wasm_bindgen]
pub fn relevant_file_extensions() -> Result<Vec<String>, String> {
console_error_panic_hook::set_once();
Ok(kcl_lib::RELEVANT_FILE_EXTENSIONS
.iter()
.map(|s| s.to_string())
.collect::<Vec<String>>())
}

View File

@ -1,37 +0,0 @@
// From https://github.com/OpenBuilds/OpenBuilds-CONTROL/blob/4800540ffaa517925fc2cff26670809efa341ffe/signWin.js
const { execSync } = require('node:child_process')
exports.default = async (configuration) => {
if (!process.env.SM_API_KEY) {
console.error(
'Signing using signWin.js script: failed: SM_API_KEY ENV VAR NOT FOUND'
)
return
}
if (!process.env.WINDOWS_CERTIFICATE_THUMBPRINT) {
console.error(
'Signing using signWin.js script: failed: FINGERPRINT ENV VAR NOT FOUND'
)
return
}
if (!configuration.path) {
throw new Error(
`Signing using signWin.js script: failed: TARGET PATH NOT FOUND`
)
}
try {
execSync(
`smctl sign --fingerprint="${process.env.WINDOWS_CERTIFICATE_THUMBPRINT
}" --input "${String(configuration.path)}"`,
{
stdio: 'inherit',
}
)
console.log('Signing using signWin.js script: successful')
} catch (error) {
throw new Error('Signing using signWin.js script: failed:', error)
}
}

View File

@@ -1180,8 +1180,7 @@ export class SceneEntities {
     _ast = pResult.program

     // do a quick mock execution to get the program memory up-to-date
-    const didReParse = await this.kclManager.executeAstMock(_ast)
-    if (err(didReParse)) return didReParse
+    await this.kclManager.executeAstMock(_ast)

     const justCreatedNode = getNodeFromPath<VariableDeclaration>(
       _ast,
@@ -1581,8 +1580,7 @@
     _ast = pResult.program

     // do a quick mock execution to get the program memory up-to-date
-    const didReParse = await this.kclManager.executeAstMock(_ast)
-    if (err(didReParse)) return didReParse
+    await this.kclManager.executeAstMock(_ast)

     const { truncatedAst } = await this.setupSketch({
       sketchEntryNodePath: updatedEntryNodePath,
@@ -1777,8 +1775,7 @@
     _ast = pResult.program

     // do a quick mock execution to get the program memory up-to-date
-    const didReParse = await this.kclManager.executeAstMock(_ast)
-    if (err(didReParse)) return didReParse
+    await this.kclManager.executeAstMock(_ast)

     const index = sg.paths.length // because we've added a new segment that's not in the memory yet
     const draftExpressionsIndices = { start: index, end: index }
@@ -2005,8 +2002,7 @@
     _ast = pResult.program

     // do a quick mock execution to get the program memory up-to-date
-    const didReParse = await this.kclManager.executeAstMock(_ast)
-    if (err(didReParse)) return didReParse
+    await this.kclManager.executeAstMock(_ast)

     const index = sg.paths.length // because we've added a new segment that's not in the memory yet
     const draftExpressionsIndices = { start: index, end: index }
@@ -2271,8 +2267,7 @@
     _ast = pResult.program

     // do a quick mock execution to get the program memory up-to-date
-    const didReParse = await this.kclManager.executeAstMock(_ast)
-    if (err(didReParse)) return didReParse
+    await this.kclManager.executeAstMock(_ast)

     const { truncatedAst } = await this.setupSketch({
       sketchEntryNodePath: updatedEntryNodePath,
@@ -2505,10 +2500,7 @@
       addingNewSegmentStatus = 'pending'

       if (trap(mod)) return
-      const didReParse = await this.kclManager.executeAstMock(
-        mod.modifiedAst
-      )
-      if (err(didReParse)) return
+      await this.kclManager.executeAstMock(mod.modifiedAst)

       this.tearDownSketch({ removeAxis: false })
       // eslint-disable-next-line @typescript-eslint/no-floating-promises
       this.setupSketch({
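The `-` side of these hunks follows an error-as-value convention: executeAstMock is assumed to resolve to an Error on failure, and callers bail out instead of continuing with stale program memory. A self-contained sketch of that guard; err below is a stand-in for the project's own helper, and the signatures are assumptions:

// Stand-in for the project's err() type guard.
function err(value: unknown): value is Error {
  return value instanceof Error
}

// Re-run a mock execution and propagate a failure to the caller rather than
// silently continuing, mirroring the `didReParse` pattern above.
async function reExecuteMock(
  executeAstMock: (ast: unknown) => Promise<Error | undefined>,
  ast: unknown
): Promise<Error | undefined> {
  const didReParse = await executeAstMock(ast)
  if (err(didReParse)) return didReParse
  return undefined
}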

View File

@@ -14,7 +14,6 @@ import {
   commandBarActor,
   useCommandBarState,
 } from '@src/machines/commandBarMachine'
-import toast from 'react-hot-toast'

 export const COMMAND_PALETTE_HOTKEY = 'mod+k'

@@ -36,23 +35,13 @@ export const CommandBar = () => {
     commandBarActor.send({ type: 'Close' })
   }, [pathname])

-  /**
-   * if the engine connection is about to end, we don't want users
-   * to be able to perform commands that might require that connection,
-   * so we just close the command palette.
-   * TODO: instead, let each command control whether it is disabled, and
-   * don't just bail out
-   */
   useEffect(() => {
     if (
-      !commandBarActor.getSnapshot().matches('Closed') &&
-      (immediateState.type === EngineConnectionStateType.Disconnecting ||
-        immediateState.type === EngineConnectionStateType.Disconnected)
+      immediateState.type !== EngineConnectionStateType.ConnectionEstablished
     ) {
       commandBarActor.send({ type: 'Close' })
-      toast.error('Exiting command flow because engine disconnected')
     }
-  }, [immediateState, commandBarActor])
+  }, [immediateState])

   // Hook up keyboard shortcuts
   useHotkeyWrapper([COMMAND_PALETTE_HOTKEY], () => {
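Restated as a pure function, the close condition on the `-` side of this hunk only closes an already-open palette, and only when the connection is actually going away. A small sketch; the string unions stand in for the real enums:

type PaletteState = 'Closed' | 'Open'
type ConnectionState = 'ConnectionEstablished' | 'Disconnecting' | 'Disconnected'

// True when the command palette should be force-closed because the engine
// connection is ending; an already-closed palette is left alone.
export function shouldCloseCommandBar(
  palette: PaletteState,
  connection: ConnectionState
): boolean {
  return (
    palette !== 'Closed' &&
    (connection === 'Disconnecting' || connection === 'Disconnected')
  )
}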

View File

@@ -760,8 +760,7 @@ export const ModelingMachineProvider = ({
               // remove body item at varDecIndex
               newAst.body = newAst.body.filter((_, i) => i !== varDecIndex)
-              const didReParse = await kclManager.executeAstMock(newAst)
-              if (err(didReParse)) return reject(didReParse)
+              await kclManager.executeAstMock(newAst)
               await codeManager.updateEditorWithAstAndWriteToFile(newAst)
             }
             sceneInfra.setCallbacks({
@@ -774,7 +773,6 @@
       'animate-to-face': fromPromise(async ({ input }) => {
         if (!input) return null
         if (input.type === 'extrudeFace' || input.type === 'offsetPlane') {
-          const originalCode = codeManager.code
           const sketched =
             input.type === 'extrudeFace'
               ? sketchOnExtrudedFace(
@@ -793,13 +791,7 @@
           }
           const { modifiedAst, pathToNode: pathToNewSketchNode } = sketched

-          const didReParse = await kclManager.executeAstMock(modifiedAst)
-          if (err(didReParse)) {
-            // there was a problem, restore the original code
-            codeManager.code = originalCode
-            await kclManager.executeCode()
-            return reject(didReParse)
-          }
+          await kclManager.executeAstMock(modifiedAst)

           const id =
             input.type === 'extrudeFace' ? input.faceId : input.planeId
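The larger removal in the last hunk is a rollback: snapshot the editor code before the mock execution and, if re-parsing fails, restore the snapshot and re-execute. A minimal sketch with assumed manager shapes; only the control flow comes from the diff:

async function runMockWithRollback(
  deps: {
    codeManager: { code: string }
    kclManager: {
      executeAstMock(ast: unknown): Promise<Error | undefined>
      executeCode(): Promise<void>
    }
  },
  modifiedAst: unknown
): Promise<Error | undefined> {
  const originalCode = deps.codeManager.code
  const didReParse = await deps.kclManager.executeAstMock(modifiedAst)
  if (didReParse instanceof Error) {
    // There was a problem: restore the original code and re-run it.
    deps.codeManager.code = originalCode
    await deps.kclManager.executeCode()
    return didReParse
  }
  return undefined
}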

View File

@@ -4,7 +4,6 @@ import type { MouseEventHandler } from 'react'
 import { useCallback, useContext, useEffect, useMemo } from 'react'
 import { useHotkeys } from 'react-hotkeys-hook'

-import { useAppState } from '@src/AppState'
 import { ActionIcon } from '@src/components/ActionIcon'
 import type { CustomIconName } from '@src/components/CustomIcon'
 import { MachineManagerContext } from '@src/components/MachineManagerProvider'
@@ -17,10 +16,7 @@ import { sidebarPanes } from '@src/components/ModelingSidebar/ModelingPanes'
 import Tooltip from '@src/components/Tooltip'
 import { DEV } from '@src/env'
 import { useModelingContext } from '@src/hooks/useModelingContext'
-import { useNetworkContext } from '@src/hooks/useNetworkContext'
-import { NetworkHealthState } from '@src/hooks/useNetworkStatus'
 import { useKclContext } from '@src/lang/KclProvider'
-import { EngineConnectionStateType } from '@src/lang/std/engineConnection'
 import { SIDEBAR_BUTTON_SUFFIX } from '@src/lib/constants'
 import { isDesktop } from '@src/lib/isDesktop'
 import { useSettings } from '@src/machines/appMachine'
@@ -56,16 +52,6 @@ export function ModelingSidebar({ paneOpacity }: ModelingSidebarProps) {
       : 'pointer-events-auto '
   const showDebugPanel = settings.app.showDebugPanel

-  const { overallState, immediateState } = useNetworkContext()
-  const { isExecuting } = useKclContext()
-  const { isStreamReady } = useAppState()
-  const reliesOnEngine =
-    (overallState !== NetworkHealthState.Ok &&
-      overallState !== NetworkHealthState.Weak) ||
-    isExecuting ||
-    immediateState.type !== EngineConnectionStateType.ConnectionEstablished ||
-    !isStreamReady
-
   const paneCallbackProps = useMemo(
     () => ({
       kclContext,
@@ -107,8 +93,6 @@
       sidebarName: 'Export part',
       icon: 'floppyDiskArrow',
       keybinding: 'Ctrl + Shift + E',
-      disable: () =>
-        reliesOnEngine ? 'Need engine connection to export' : undefined,
       action: () =>
         commandBarActor.send({
           type: 'Find and select command',
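The removed reliesOnEngine flag gates engine-dependent sidebar actions such as export. Restated as a pure function, with the union types below as simplified stand-ins for the real enums, the gate reads:

type NetworkHealth = 'Ok' | 'Weak' | 'Issue' | 'Disconnected'
type EngineConnection =
  | 'ConnectionEstablished'
  | 'Connecting'
  | 'Disconnecting'
  | 'Disconnected'

// True when an action that needs the engine should be disabled: the network is
// unhealthy, an execution is in flight, the connection is not established, or
// the video stream is not ready yet.
export function reliesOnEngine(state: {
  overallState: NetworkHealth
  isExecuting: boolean
  connectionState: EngineConnection
  isStreamReady: boolean
}): boolean {
  return (
    (state.overallState !== 'Ok' && state.overallState !== 'Weak') ||
    state.isExecuting ||
    state.connectionState !== 'ConnectionEstablished' ||
    !state.isStreamReady
  )
}

// The sidebar then computes a disable reason from it, e.g.
// disable: () => (reliesOnEngine(state) ? 'Need engine connection to export' : undefined)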

Some files were not shown because too many files have changed in this diff.