Compare commits
59 Commits
pierremtb/ ... codex/fix-
SHA1:
9b901dfe51, 416de9a9fb, da65426ddc, 585b485852, e85f16ff9c, e7d2289a14, d35531758d, 729e0a7949, 620b7401aa, e3e67b00d5, 49d4f8e5c3, 47b159c605, c7b086fa69, 203db79204, 48a4fd8373, 17eb84325f, ebf048478d, 28a8cd2421, 1506de92f5, 8a03413643, f59b806a88, 23a0085c78, a280a8c3f0, 11620dfa6b, f6e26e0bab, f6b3a55cbf, 74939e5cd6, 9906c9947a, 48d6a21f0a, dd6a980915, 2516df3a39, 52125f0566, e489222b6a, d93a57d7bf, d34aea345b, 0b6102b0ac, 9e0873ed84, 8587eb5fea, b898c27e74, 3026866a16, 92fc294eae, 21e967ea7f, 3f00e7186c, d3a4fd8b55, 2be7107cca, 94f194a984, 4fe880a970, 8f5fbfc273, e660f52bb0, d74fdd9369, 334145f0be, c24073b6ae, 078b7f3bf7, 3d65676ccb, ce566fb6e5, b23fc9f623, 5c2dfb8e40, 0e341d7863, 6a03ff9596

.github/workflows/build-apps.yml (55 changes)

@@ -123,18 +123,6 @@ jobs:
- id: export_notes
run: echo "notes=`cat release-notes.md`" >> "$GITHUB_OUTPUT"
- name: Prepare electron-builder.yml file for updater test
if: ${{ env.IS_RELEASE == 'true' }}
run: |
yq -i '.publish[0].url = "https://dl.zoo.dev/releases/modeling-app/updater-test"' electron-builder.yml
- uses: actions/upload-artifact@v4
if: ${{ env.IS_RELEASE == 'true' }}
with:
name: prepared-files-updater-test
path: |
electron-builder.yml
build-apps:
needs: [prepare-files]
@@ -259,49 +247,6 @@ jobs:
# TODO: add the 'Build for Mac TestFlight (nightly)' stage back
# The steps below are for updater-test builds, only on release
- uses: actions/download-artifact@v4
if: ${{ env.IS_RELEASE == 'true' }}
name: prepared-files-updater-test
- name: Copy updated electron-builder.yml file for updater test
if: ${{ env.IS_RELEASE == 'true' }}
run: |
ls -R prepared-files-updater-test
cp prepared-files-updater-test/electron-builder.yml electron-builder.yml
- name: Build the app (updater-test)
if: ${{ env.IS_RELEASE == 'true' }}
env:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
CSC_LINK: ${{ secrets.APPLE_CERTIFICATE }}
CSC_KEY_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
CSC_KEYCHAIN: ${{ secrets.APPLE_SIGNING_IDENTITY }}
WINDOWS_CERTIFICATE_THUMBPRINT: ${{ secrets.WINDOWS_CERTIFICATE_THUMBPRINT }}
run: npm run tronb:package:prod
- uses: actions/upload-artifact@v4
if: ${{ env.IS_RELEASE == 'true' }}
with:
name: updater-test-arm64-${{ matrix.platform }}
path: |
out/*-arm64-win.exe
out/*-arm64-mac.dmg
out/*-arm64-linux.AppImage
- uses: actions/upload-artifact@v4
if: ${{ env.IS_RELEASE == 'true' }}
with:
name: updater-test-x64-${{ matrix.platform }}
path: |
out/*-x64-win.exe
out/*-x64-mac.dmg
out/*-x86_64-linux.AppImage
upload-apps-release:
runs-on: ubuntu-22.04

.github/workflows/build-wasm.yml (new file, 56 lines)

@@ -0,0 +1,56 @@
name: Build WASM
on:
workflow_call:
permissions:
contents: read
jobs:
npm-build-wasm:
runs-on: runs-on=${{ github.run_id }}/family=i7ie.2xlarge/image=ubuntu22-full-x64
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Install dependencies
run: npm install
- name: Use correct Rust toolchain
shell: bash
run: |
[ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
- name: Install rust
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: false # configured below
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
with:
tool: wasm-pack
- name: Use Rust cache
uses: Swatinem/rust-cache@v2
with:
workspaces: './rust'
- name: Build Wasm
shell: bash
run: npm run build:wasm
- uses: actions/upload-artifact@v4
with:
name: prepared-wasm
path: |
rust/kcl-wasm-lib/pkg/kcl_wasm_lib*
- uses: actions/upload-artifact@v4
with:
name: prepared-ts-rs-bindings
path: |
rust/kcl-lib/bindings/*
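
The new `build-wasm.yml` only triggers on `workflow_call`, i.e. it is a reusable workflow; later sections of this diff point `static-analysis.yml` and the new `unit-tests.yml` at it. As a rough illustrative sketch (the job name `npm-build-wasm` and the artifact names `prepared-wasm` / `prepared-ts-rs-bindings` mirror the workflows changed in this PR, while the workflow name and trigger below are assumptions), a caller consumes it like this:

```yaml
# Illustrative caller sketch; not part of this diff.
name: Example consumer

on:
  pull_request:

jobs:
  npm-build-wasm:
    # Call the reusable workflow added by this PR.
    uses: ./.github/workflows/build-wasm.yml

  npm-test-unit:
    runs-on: ubuntu-latest
    needs: npm-build-wasm
    steps:
      - uses: actions/checkout@v4
      # Artifacts uploaded by the reusable workflow are available to later
      # jobs in the same run via actions/download-artifact.
      - uses: actions/download-artifact@v4
        with:
          name: prepared-wasm
      - uses: actions/download-artifact@v4
        with:
          name: prepared-ts-rs-bindings
```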

.github/workflows/cargo-test.yml (50 changes)

@@ -155,7 +155,7 @@ jobs:
shell: bash
run: |
[ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
- name: Install rust
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: false # Configured below.
@@ -190,6 +190,54 @@ jobs:
TAB_API_KEY: ${{ secrets.TAB_API_KEY }}
CI_COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
CI_PR_NUMBER: ${{ github.event.pull_request.number }}
run-internal-kcl-samples:
name: cargo test (internal-kcl-samples)
runs-on:
- runs-on=${{ github.run_id }}
- runner=32cpu-linux-x64
- extras=s3-cache
steps:
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
private-key: ${{ secrets.MODELING_APP_GH_APP_PRIVATE_KEY }}
owner: ${{ github.repository_owner }}
- uses: actions/checkout@v4
with:
token: ${{ steps.app-token.outputs.token }}
- name: Use correct Rust toolchain
shell: bash
run: |
[ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
- name: Install Rust
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: false # Configured below.
- name: Start Vector
run: .github/ci-cd-scripts/start-vector-ubuntu.sh
env:
GH_ACTIONS_AXIOM_TOKEN: ${{ secrets.GH_ACTIONS_AXIOM_TOKEN }}
OS_NAME: ${{ env.OS_NAME }}
- uses: taiki-e/install-action@nextest
- name: Download internal KCL samples
run: git clone --depth=1 https://x-access-token:${{ secrets.GH_PAT_KCL_SAMPLES_INTERNAL }}@github.com/KittyCAD/kcl-samples-internal public/kcl-samples/internal
- name: Run tests
shell: bash
run: |-
cd rust/kcl-lib
cargo nextest run \
--retries=10 --no-fail-fast --features artifact-graph --profile=ci \
internal \
2>&1 | tee /tmp/github-actions.log
env:
TWENTY_TWENTY: overwrite
INSTA_UPDATE: always
EXPECTORATE: overwrite
KITTYCAD_API_TOKEN: ${{secrets.KITTYCAD_API_TOKEN_DEV}}
ZOO_HOST: https://api.dev.zoo.dev
MODELING_APP_INTERNAL_SAMPLES_SECRET: ${{secrets.MODELING_APP_INTERNAL_SAMPLES_SECRET}}
run-wasm-tests:
name: Run wasm tests
strategy:

.github/workflows/e2e-tests.yml (28 changes)

@@ -40,7 +40,7 @@ jobs:
- name: Install dependencies
run: npm install
- name: Download Wasm Cache
- name: Download Wasm cache
id: download-wasm
if: ${{ github.event_name != 'schedule' && steps.filter.outputs.rust == 'false' }}
uses: dawidd6/action-download-artifact@v7
@@ -52,7 +52,7 @@
branch: main
path: rust/kcl-wasm-lib/pkg
- name: Build WASM condition
- name: Build Wasm condition
id: wasm
run: |
set -euox pipefail
@@ -70,7 +70,7 @@
run: |
[ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
- name: Install rust
- name: Install Rust
if: ${{ steps.wasm.outputs.should-build-wasm == 'true' }}
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
@@ -81,7 +81,7 @@
with:
tool: wasm-pack
- name: Rust Cache
- name: Use Rust cache
if: ${{ steps.wasm.outputs.should-build-wasm == 'true' }}
uses: Swatinem/rust-cache@v2
with:
@@ -117,7 +117,7 @@
- uses: actions/download-artifact@v4
name: prepared-wasm
- name: Copy prepared wasm
- name: Copy prepared Wasm
run: |
ls -R prepared-wasm
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
@@ -133,20 +133,17 @@
id: deps-install
run: npm install
- name: Cache Playwright Browsers
- name: Cache browsers
uses: actions/cache@v4
with:
path: |
~/.cache/ms-playwright/
key: ${{ runner.os }}-playwright-${{ hashFiles('package-lock.json') }}
- name: Install Playwright Browsers
- name: Install browsers
run: npm run playwright install --with-deps
- name: build web
run: npm run tronb:vite:dev
- name: Run ubuntu/chrome snapshots
- name: Capture snapshots
uses: nick-fields/retry@v3.0.2
with:
shell: bash
@@ -170,7 +167,7 @@
retention-days: 30
overwrite: true
- name: Check for changes
- name: Check diff
if: ${{ github.ref != 'refs/heads/main' }}
shell: bash
id: git-check
@@ -181,9 +178,8 @@
else echo "modified=false" >> $GITHUB_OUTPUT
fi
- name: Commit changes, if any
# TODO: find a more reliable way to detect visual changes
if: ${{ false && steps.git-check.outputs.modified == 'true' }}
- name: Commit changes
if: ${{ steps.git-check.outputs.modified == 'true' }}
shell: bash
run: |
git add e2e/playwright/snapshot-tests.spec.ts-snapshots e2e/playwright/snapshots
@@ -193,7 +189,7 @@
git fetch origin
echo ${{ github.head_ref }}
git checkout ${{ github.head_ref }}
git commit -m "A snapshot a day keeps the bugs away! 📷🐛" || true
git commit --message "Update snapshots" || true
git push
git push origin ${{ github.head_ref }}

.github/workflows/kcl-language-server.yml (61 changes)

@@ -21,14 +21,11 @@ on:
- '**.rs'
- .github/workflows/kcl-language-server.yml
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
@@ -38,10 +35,9 @@ env:
MACOSX_DEPLOYMENT_TARGET: 10.15
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
jobs:
test:
name: vscode tests
name: kcl-language-server (vscode tests)
strategy:
fail-fast: false
matrix:
@@ -77,8 +73,7 @@ jobs:
include:
- os: windows-latest
target: x86_64-pc-windows-msvc
code-target:
win32-x64
code-target: win32-x64
#- os: windows-latest
#target: i686-pc-windows-msvc
#code-target:
@@ -88,8 +83,7 @@
#code-target: win32-arm64
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
code-target:
linux-x64
code-target: linux-x64
#- os: ubuntu-latest
#target: aarch64-unknown-linux-musl
#code-target: linux-arm64
@@ -105,41 +99,33 @@
- os: macos-latest
target: aarch64-apple-darwin
code-target: darwin-arm64
name: build-release (${{ matrix.target }})
name: kcl-language-server build-release (${{ matrix.target }})
runs-on: ${{ matrix.os }}
container: ${{ matrix.container }}
env:
RA_TARGET: ${{ matrix.target }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
- name: Use correct Rust toolchain
shell: bash
run: |
rm rust/rust-toolchain.toml
- name: Install rust
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: rust
components: rust-src
target: ${{ matrix.target }}
- name: Install Node.js
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
- name: Update apt repositories
if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' || matrix.os == 'ubuntu-latest'
run: sudo apt-get update
- if: ${{ matrix.os == 'ubuntu-latest' }}
name: Install deps
shell: bash
@@ -164,64 +150,53 @@
zlib1g-dev
cargo install cross
- name: Install AArch64 target toolchain
if: matrix.target == 'aarch64-unknown-linux-gnu'
run: sudo apt-get install gcc-aarch64-linux-gnu
- name: Install ARM target toolchain
if: matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get install gcc-arm-linux-gnueabihf
- name: build
run: |
cd rust
cargo kcl-language-server-release build --client-patch-version ${{ github.run_number }}
- name: Install dependencies
run: |
cd rust/kcl-language-server
# npm will symlink which will cause issues w tarballing later
yarn install
- name: Package Extension (release)
if: startsWith(github.event.ref, 'refs/tags/')
run: |
cd rust/kcl-language-server
npx vsce package --yarn -o "../build/kcl-language-server-${{ matrix.code-target }}.vsix" --target ${{ matrix.code-target }}
- name: Package Extension (nightly)
if: startsWith(github.event.ref, 'refs/tags/') == false
run: |
cd rust/kcl-language-server
npx vsce package --yarn -o "../build/kcl-language-server-${{ matrix.code-target }}.vsix" --target ${{ matrix.code-target }} --pre-release
- name: remove server
if: matrix.target == 'x86_64-unknown-linux-gnu'
run: |
cd rust/kcl-language-server
rm -rf server
- name: Package Extension (no server, release)
if: matrix.target == 'x86_64-unknown-linux-gnu' && startsWith(github.event.ref, 'refs/tags/')
run: |
cd rust/kcl-language-server
npx vsce package --yarn -o ../build/kcl-language-server-no-server.vsix
- name: Package Extension (no server, nightly)
if: matrix.target == 'x86_64-unknown-linux-gnu' && startsWith(github.event.ref, 'refs/tags/') == false
run: |
cd rust/kcl-language-server
npx vsce package --yarn -o ../build/kcl-language-server-no-server.vsix --pre-release
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: release-${{ matrix.target }}
path: ./rust/build
build-release-x86_64-unknown-linux-musl:
name: build-release (x86_64-unknown-linux-musl)
name: kcl-language-server build-release (x86_64-unknown-linux-musl)
runs-on: ubuntu-latest
env:
RA_TARGET: x86_64-unknown-linux-musl
@@ -231,7 +206,6 @@ jobs:
image: alpine:latest
volumes:
- /usr/local/cargo/registry:/usr/local/cargo/registry
steps:
- name: Install dependencies
run: |
@@ -245,55 +219,46 @@
nodejs \
npm \
yarn
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
- name: Use correct Rust toolchain
shell: bash
run: |
rm rust/rust-toolchain.toml
- name: Install rust
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: rust
components: rust-src
target: ${{ matrix.target }}
- name: build
run: |
cd rust
cargo kcl-language-server-release build --client-patch-version ${{ github.run_number }}
- name: Install dependencies
run: |
cd rust/kcl-language-server
# npm will symlink which will cause issues w tarballing later
yarn install
- name: Package Extension (release)
if: startsWith(github.event.ref, 'refs/tags/')
run: |
cd rust/kcl-language-server
npx vsce package --yarn -o "../build/kcl-language-server-alpine-x64.vsix" --target alpine-x64
- name: Package Extension (release)
if: startsWith(github.event.ref, 'refs/tags/') == false
run: |
cd rust/kcl-language-server
npx vsce package --yarn -o "../build/kcl-language-server-alpine-x64.vsix" --target alpine-x64 --pre-release
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: release-x86_64-unknown-linux-musl
path: ./rust/build
publish:
name: publish
name: kcl-language-server (publish)
runs-on: ubuntu-latest
needs: ["build-release", "build-release-x86_64-unknown-linux-musl"]
if: startsWith(github.event.ref, 'refs/tags')
@@ -301,22 +266,17 @@
contents: write
steps:
- run: echo "TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
- run: 'echo "TAG: $TAG"'
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: ${{ env.FETCH_DEPTH }}
- name: Install Nodejs
uses: actions/setup-node@v4
with:
node-version-file: ".nvmrc"
- run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
- run: 'echo "HEAD_SHA: $HEAD_SHA"'
- uses: actions/download-artifact@v4
with:
name: release-aarch64-apple-darwin
@@ -344,8 +304,7 @@
- uses: actions/download-artifact@v4
with:
name: release-x86_64-pc-windows-msvc
path:
rust/build
path: rust/build
#- uses: actions/download-artifact@v4
#with:
#name: release-i686-pc-windows-msvc
@@ -356,21 +315,18 @@
#name: release-aarch64-pc-windows-msvc
#path: rust/build
- run: ls -al ./rust/build
- name: Publish Release
uses: ./.github/actions/github-release
with:
files: "rust/build/*"
name: ${{ env.TAG }}
token: ${{ secrets.GITHUB_TOKEN }}
- name: move files to dir for upload
shell: bash
run: |
cd rust
mkdir -p releases/language-server/${{ env.TAG }}
cp -r build/* releases/language-server/${{ env.TAG }}
- name: "Authenticate to Google Cloud"
uses: "google-github-actions/auth@v2.1.8"
with:
@@ -385,15 +341,12 @@
with:
path: rust/releases
destination: dl.kittycad.io
- run: rm rust/build/kcl-language-server-no-server.vsix
- name: Publish Extension (Code Marketplace, release)
# token from https://dev.azure.com/kcl-language-server/
run: |
cd rust/kcl-language-server
npx vsce publish --pat ${{ secrets.VSCE_PAT }} --packagePath ../build/kcl-language-server-*.vsix
- name: Publish Extension (OpenVSX, release)
run: |
cd rust/kcl-language-server

.github/workflows/kcl-python-bindings.yml (14 changes)

@@ -4,7 +4,6 @@
# maturin generate-ci github
#
name: kcl-python-bindings
on:
push:
branches:
@@ -27,16 +26,14 @@ on:
- '**.rs'
- .github/workflows/kcl-python-bindings.yml
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
linux-x86_64:
name: kcl-python-bindings (linux-x86_64)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -58,8 +55,8 @@ jobs:
with:
name: wheels-linux-x86_64
path: rust/kcl-python-bindings/dist
windows:
name: kcl-python-bindings (windows)
runs-on: windows-16-cores
strategy:
matrix:
@@ -84,8 +81,8 @@ jobs:
with:
name: wheels-windows-${{ matrix.target }}
path: rust/kcl-python-bindings/dist
macos:
name: kcl-python-bindings (macos)
runs-on: macos-latest
strategy:
matrix:
@@ -110,8 +107,8 @@ jobs:
with:
name: wheels-macos-${{ matrix.target }}
path: rust/kcl-python-bindings/dist
test:
name: kcl-python-bindings (test)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -127,8 +124,8 @@ jobs:
env:
KITTYCAD_API_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
ZOO_HOST: https://api.dev.zoo.dev
sdist:
name: kcl-python-bindings (sdist)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -151,7 +148,6 @@ jobs:
with:
name: wheels-sdist
path: rust/kcl-python-bindings/dist
release:
name: Release
runs-on: ubuntu-latest

.github/workflows/static-analysis.yml (167 changes)

@@ -28,53 +28,7 @@ jobs:
- run: npm run fmt:check
npm-build-wasm:
# Build the wasm blob once on the fastest runner.
runs-on: runs-on=${{ github.run_id }}/family=i7ie.2xlarge/image=ubuntu22-full-x64
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- name: Install dependencies
run: npm install
- name: Use correct Rust toolchain
shell: bash
run: |
[ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
- name: Install rust
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: false # Configured below.
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
with:
tool: wasm-pack
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: './rust'
- name: Build Wasm
shell: bash
run: npm run build:wasm
- uses: actions/upload-artifact@v4
with:
name: prepared-wasm
path: |
rust/kcl-wasm-lib/pkg/kcl_wasm_lib*
- uses: actions/upload-artifact@v4
with:
name: prepared-ts-rs-bindings
path: |
rust/kcl-lib/bindings/*
uses: ./.github/workflows/build-wasm.yml
npm-tsc:
runs-on: ubuntu-latest
@@ -173,122 +127,3 @@ jobs:
uses: actions/checkout@v4
- name: Run codespell
uses: crate-ci/typos@v1.32.0
npm-unit-test-kcl-samples:
runs-on: ubuntu-latest
needs: npm-build-wasm
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- run: npm install
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
with:
tool: wasm-pack
- name: Download all artifacts
uses: actions/download-artifact@v4
- name: Copy prepared wasm
run: |
ls -R prepared-wasm
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
mkdir rust/kcl-wasm-lib/pkg
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
- name: Copy prepared ts-rs bindings
run: |
ls -R prepared-ts-rs-bindings
mkdir rust/kcl-lib/bindings
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
- run: npm run simpleserver:bg
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
- name: Install Chromium Browser
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
run: npm run playwright install chromium --with-deps
- name: Download internal KCL samples
run: git clone --depth=1 https://x-access-token:${{ secrets.GH_PAT_KCL_SAMPLES_INTERNAL }}@github.com/KittyCAD/kcl-samples-internal public/kcl-samples/internal
- name: Regenerate KCL samples manifest
run: cd rust/kcl-lib && EXPECTORATE=overwrite cargo test generate_manifest
- name: Check public and internal KCL samples
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
run: npm run test:unit:kcl-samples
env:
VITE_KC_DEV_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
npm-unit-test:
runs-on: ubuntu-latest
needs: npm-build-wasm
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- run: npm install
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
with:
tool: wasm-pack
- name: Download all artifacts
uses: actions/download-artifact@v4
- name: Copy prepared wasm
run: |
ls -R prepared-wasm
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
mkdir rust/kcl-wasm-lib/pkg
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
- name: Copy prepared ts-rs bindings
run: |
ls -R prepared-ts-rs-bindings
mkdir rust/kcl-lib/bindings
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
- run: npm run simpleserver:bg
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
- name: Install Chromium Browser
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
run: npm run playwright install chromium --with-deps
- name: Run unit tests
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
run: xvfb-run -a npm run test:unit
env:
VITE_KC_DEV_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
- name: Check for changes
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
id: git-check
run: |
git add src/lang/std/artifactMapGraphs
if git status src/lang/std/artifactMapGraphs | grep -q "Changes to be committed"
then echo "modified=true" >> $GITHUB_OUTPUT
else echo "modified=false" >> $GITHUB_OUTPUT
fi
- name: Commit changes, if any
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' && steps.git-check.outputs.modified == 'true' }}
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git remote set-url origin https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git
git fetch origin
echo ${{ github.head_ref }}
git checkout ${{ github.head_ref }}
# TODO when webkit works on ubuntu remove the os part of the commit message
git commit -am "Look at this (photo)Graph *in the voice of Nickelback*" || true
git push
git push origin ${{ github.head_ref }}

.github/workflows/unit-tests.yml (new file, 124 lines)

@@ -0,0 +1,124 @@
name: Unit Tests
on:
pull_request:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
permissions:
contents: write
pull-requests: write
actions: read
jobs:
npm-build-wasm:
uses: ./.github/workflows/build-wasm.yml
npm-test-unit:
runs-on: ubuntu-latest
needs: npm-build-wasm
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- run: npm install
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
with:
tool: wasm-pack
- name: Download all artifacts
uses: actions/download-artifact@v4
- name: Copy prepared wasm
run: |
ls -R prepared-wasm
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
mkdir rust/kcl-wasm-lib/pkg
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
- name: Copy prepared ts-rs bindings
run: |
ls -R prepared-ts-rs-bindings
mkdir rust/kcl-lib/bindings
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
- run: npm run simpleserver:bg
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
- name: Install Chromium Browser
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
run: npm run playwright install chromium --with-deps
- name: Run unit tests
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
run: xvfb-run -a npm run test:unit
env:
VITE_KC_DEV_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
- name: Check for changes
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
id: git-check
run: |
git add src/lang/std/artifactMapGraphs
if git status src/lang/std/artifactMapGraphs | grep -q "Changes to be committed"
then echo "modified=true" >> $GITHUB_OUTPUT
else echo "modified=false" >> $GITHUB_OUTPUT
fi
- name: Commit changes, if any
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' && steps.git-check.outputs.modified == 'true' }}
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git remote set-url origin https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git
git fetch origin
echo ${{ github.head_ref }}
git checkout ${{ github.head_ref }}
# TODO when webkit works on ubuntu remove the os part of the commit message
git commit -am "Look at this (photo)Graph *in the voice of Nickelback*" || true
git push
git push origin ${{ github.head_ref }}
npm-test-unit-components:
runs-on: ubuntu-latest
needs: npm-build-wasm
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'npm'
- run: npm install
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
with:
tool: wasm-pack
- name: Download all artifacts
uses: actions/download-artifact@v4
- name: Copy prepared wasm
run: |
ls -R prepared-wasm
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
mkdir rust/kcl-wasm-lib/pkg
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
- name: Copy prepared ts-rs bindings
run: |
ls -R prepared-ts-rs-bindings
mkdir rust/kcl-lib/bindings
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
- name: Run component tests
run: npm run test:unit:components

.gitignore (2 changes)

@@ -58,6 +58,8 @@ trace.zip
/public/kcl-samples/.github
/public/kcl-samples/screenshots/main.kcl
/public/kcl-samples/step/main.kcl
/public/kcl-samples/internal
/rust/kcl-lib/tests/kcl_samples/internal
/test-results/
/playwright-report/
/blob-report/

@@ -130,7 +130,7 @@ git tag $VERSION
git push origin --tags
```
This will trigger the `build-apps` workflow, set the version, build & sign the apps, and generate release files as well as updater-test artifacts.
This will trigger the `build-apps` workflow, set the version, build & sign the apps, and generate release files.
The workflow should be listed right away [in this list](https://github.com/KittyCAD/modeling-app/actions/workflows/build-apps.yml?query=event%3Apush)).
@@ -142,13 +142,10 @@ The release builds can be found under the `out-{arch}-{platform}` zip files, at
Manually test against this [list](https://github.com/KittyCAD/modeling-app/issues/3588) across Windows, MacOS, Linux and posting results as comments in the issue.
##### Updater-test builds
The other `build-apps` output in the release `build-apps` workflow (triggered by 2.) is `updater-test-{arch}-{platform}`. It's a semi-automated process: for macOS, Windows, and Linux, download the corresponding updater-test artifact file, install the app, run it, expect an updater prompt to a dummy v0.255.255, install it and check that the app comes back at that version.
The only difference with these builds is that they point to a different update location on the release bucket, with this dummy v0.255.255 always available. This helps ensuring that the version we release will be able to update to the next one available.
If the prompt doesn't show up, start the app in command line to grab the electron-updater logs. This is likely an issue with the current build that needs addressing (or the updater-test location in the storage bucket).
A prompt should show up asking for a downgrade to the last release version. Running through that at the end of testing
and making sure the current release candidate has the ability to be updated to what electron-updater points to is critical,
but what is actually being downloaded and installed isn't.
If the prompt doesn't show up, start the app in command line to grab the electron-updater logs. This is likely an issue with the current build that needs addressing.
```
# Windows (PowerShell)

Makefile (1 change)

@@ -114,7 +114,6 @@ test-unit: install ## Run the unit tests
npm run test:unit:components
@ curl -fs localhost:3000 >/dev/null || ( echo "Error: localhost:3000 not available, 'make run-web' first" && exit 1 )
npm run test:unit
npm run test:unit:kcl-samples
.PHONY: test-e2e
test-e2e: test-e2e-$(TARGET)

@@ -29,6 +29,7 @@ THRE = "THRE" # Weird bug that wrongly detects THREEjs as a typo
nwo = "nwo" # don't know what this is about tbh
"ot" = "ot" # some abbreviation, idk what
"oe" = "oe" # some abbreviation, idk what
"colinear" = "colinear" # some engine shit, kidding
[default]
extend-ignore-identifiers-re = [

docs/kcl-std/consts/std-sweep-SKETCH_PLANE.md (new file, 16 lines)

@@ -0,0 +1,16 @@
---
title: "sweep::SKETCH_PLANE"
subtitle: "Constant in std::sweep"
excerpt: "Local/relative to a position centered within the plane being sketched on"
layout: manual
---
Local/relative to a position centered within the plane being sketched on
```kcl
sweep::SKETCH_PLANE: string = 'sketchPlane'
```

docs/kcl-std/consts/std-sweep-TRAJECTORY.md (new file, 16 lines)

@@ -0,0 +1,16 @@
---
title: "sweep::TRAJECTORY"
subtitle: "Constant in std::sweep"
excerpt: "Local/relative to the trajectory curve"
layout: manual
---
Local/relative to the trajectory curve
```kcl
sweep::TRAJECTORY: string = 'trajectoryCurve'
```

@@ -128,6 +128,9 @@ layout: manual
* [`E`](/docs/kcl-std/consts/std-math-E)
* [`PI`](/docs/kcl-std/consts/std-math-PI)
* [`TAU`](/docs/kcl-std/consts/std-math-TAU)
* [**std::sweep**](/docs/kcl-std/modules/std-sweep)
* [`sweep::SKETCH_PLANE`](/docs/kcl-std/consts/std-sweep-SKETCH_PLANE)
* [`sweep::TRAJECTORY`](/docs/kcl-std/consts/std-sweep-TRAJECTORY)
* [**std::turns**](/docs/kcl-std/modules/std-turns)
* [`turns::HALF_TURN`](/docs/kcl-std/consts/std-turns-HALF_TURN)
* [`turns::QUARTER_TURN`](/docs/kcl-std/consts/std-turns-QUARTER_TURN)

docs/kcl-std/modules/std-sweep.md (new file, 17 lines)

@@ -0,0 +1,17 @@
---
title: "sweep"
subtitle: "Module in std"
excerpt: ""
layout: manual
---
## Functions and constants
* [`sweep::SKETCH_PLANE`](/docs/kcl-std/consts/std-sweep-SKETCH_PLANE)
* [`sweep::TRAJECTORY`](/docs/kcl-std/consts/std-sweep-TRAJECTORY)

@@ -19,6 +19,7 @@ You might also want the [KCL language reference](/docs/kcl-lang) or the [KCL gui
* [`math`](/docs/kcl-std/modules/std-math)
* [`sketch`](/docs/kcl-std/modules/std-sketch)
* [`solid`](/docs/kcl-std/modules/std-solid)
* [`sweep::sweep`](/docs/kcl-std/modules/std-sweep)
* [`transform`](/docs/kcl-std/modules/std-transform)
* [`turns::turns`](/docs/kcl-std/modules/std-turns)
* [`types`](/docs/kcl-std/modules/std-types)

@@ -134,8 +134,6 @@ extrude001 = extrude(sketch001, length = 5)`
await page.setBodyDimensions({ width: 1200, height: 500 })
await homePage.goToModelingScene()
await page.waitForTimeout(1000)
// Ensure badge is present
const codePaneButtonHolder = page.locator('#code-button-holder')
await expect(codePaneButtonHolder).toContainText('notification', {
@@ -158,10 +156,14 @@ extrude001 = extrude(sketch001, length = 5)`
await expect(
page
.getByText(
'Modeling command failed: [ApiError { error_code: InternalEngine, message: "Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis" }]'
'Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis'
)
.first()
).toBeVisible()
// Make sure ApiError is not on the page.
// This ensures we didn't nest the json
await expect(page.getByText('ApiError')).not.toBeVisible()
})
test('When error is not in view WITH LINTS you can click the badge to scroll to it', async ({
@@ -179,7 +181,7 @@ extrude001 = extrude(sketch001, length = 5)`
await page.setBodyDimensions({ width: 1200, height: 500 })
await homePage.goToModelingScene()
await scene.settled(cmdBar)
// await scene.settled(cmdBar)
// Ensure badge is present
const codePaneButtonHolder = page.locator('#code-button-holder')

@@ -58,12 +58,6 @@ test(
await expect(submitButton).toBeVisible()
await page.keyboard.press('Enter')
// Look out for the toast message
const exportingToastMessage = page.getByText(`Exporting...`)
const alreadyExportingToastMessage = page.getByText(`Already exporting`)
await expect(exportingToastMessage).toBeVisible()
await expect(alreadyExportingToastMessage).not.toBeVisible()
// Expect it to succeed
const errorToastMessage = page.getByText(`Error while exporting`)
const engineErrorToastMessage = page.getByText(`Nothing to export`)
@@ -71,8 +65,9 @@ test(
await expect(engineErrorToastMessage).not.toBeVisible()
const successToastMessage = page.getByText(`Exported successfully`)
await expect(successToastMessage).toBeVisible()
await expect(exportingToastMessage).not.toBeVisible()
await page.waitForTimeout(1_000)
const count = await successToastMessage.count()
await expect(count).toBeGreaterThanOrEqual(1)
// Check for the exported file
const firstFileFullPath = path.resolve(
@@ -141,7 +136,9 @@ test(
await expect(engineErrorToastMessage).not.toBeVisible()
const successToastMessage = page.getByText(`Exported successfully`)
await expect(successToastMessage).toBeVisible()
await page.waitForTimeout(1_000)
const count = await successToastMessage.count()
await expect(count).toBeGreaterThanOrEqual(1)
await expect(exportingToastMessage).not.toBeVisible()
// Check for the exported file=

@@ -1533,7 +1533,6 @@ sketch001 = startSketchOn(XZ)
await homePage.goToModelingScene()
await scene.connectionEstablished()
await scene.settled(cmdBar)
await scene.expectPixelColor(
TEST_COLORS.DARK_MODE_BKGD,

@@ -197,18 +197,6 @@ test.describe(
await clickElectronNativeMenuById(tronApp, 'File.Export current part')
await cmdBar.expectCommandName('Export')
})
await test.step('Modeling.File.Share part via Zoo link', async () => {
await page.waitForTimeout(250)
await clickElectronNativeMenuById(
tronApp,
'File.Share part via Zoo link'
)
const textToCheck =
'Link copied to clipboard. Anyone who clicks this link will get a copy of this file. Share carefully!'
// Check if text appears anywhere in the page
const isTextVisible = page.getByText(textToCheck)
await expect(isTextVisible).toBeVisible({ timeout: 10000 })
})
await test.step('Modeling.File.Preferences.Project settings', async () => {
await page.waitForTimeout(250)
await clickElectronNativeMenuById(

@@ -6,7 +6,6 @@ test.describe('Onboarding tests', () => {
homePage,
toolbar,
editor,
scene,
tronApp,
}) => {
if (!tronApp) {
@@ -62,7 +61,6 @@ test.describe('Onboarding tests', () => {
await editor.expectEditor.toContain('@settings(defaultLengthUnit = in)', {
shouldNormalise: true,
})
await scene.connectionEstablished()
})
await test.step('Go home and verify we still see the tutorial button, then begin it.', async () => {
@@ -132,9 +130,7 @@ test.describe('Onboarding tests', () => {
})
await test.step('Dismiss the onboarding', async () => {
await postDismissToast.waitFor({ state: 'hidden' })
await page.keyboard.press('Escape')
await expect(postDismissToast).toBeVisible()
await expect(page.getByTestId('onboarding-content')).not.toBeVisible()
await expect.poll(() => page.url()).not.toContain('/onboarding')
})
@@ -162,13 +158,10 @@ test.describe('Onboarding tests', () => {
await test.step('Gets to the onboarding start', async () => {
await expect(toolbar.projectName).toContainText('tutorial-project')
await expect(tutorialWelcomeHeading).toBeVisible()
await scene.connectionEstablished()
})
await test.step('Dismiss the onboarding', async () => {
await postDismissToast.waitFor({ state: 'hidden' })
await page.keyboard.press('Escape')
await expect(postDismissToast).toBeVisible()
await expect(page.getByTestId('onboarding-content')).not.toBeVisible()
await expect.poll(() => page.url()).not.toContain('/onboarding')
})

@@ -1931,50 +1931,37 @@ sketch002 = startSketchOn(XZ)
})
})
test(`Sweep point-and-click failing validation`, async ({
test(`Sweep point-and-click helix`, async ({
context,
page,
homePage,
scene,
editor,
toolbar,
cmdBar,
}) => {
const initialCode = `@settings(defaultLengthUnit = in)
sketch001 = startSketchOn(YZ)
|> circle(
center = [0, 0],
radius = 500
const circleCode = `circle(sketch001, center = [0, -1], radius = .1)`
const initialCode = `helix001 = helix(
axis = X,
radius = 1,
length = 10,
revolutions = 10,
angleStart = 0,
ccw = false,
)
sketch002 = startSketchOn(XZ)
|> startProfile(at = [0, 0])
|> xLine(length = -500)
|> line(endAbsolute = [-2000, 500])
`
sketch001 = startSketchOn(XZ)
profile001 = ${circleCode}`
const sweepDeclaration = 'sweep001 = sweep(profile001, path = helix001)'
await context.addInitScript((initialCode) => {
localStorage.setItem('persistCode', initialCode)
}, initialCode)
await page.setBodyDimensions({ width: 1000, height: 500 })
await homePage.goToModelingScene()
await scene.settled(cmdBar)
// One dumb hardcoded screen pixel value
const testPoint = { x: 700, y: 250 }
const [clickOnSketch1] = scene.makeMouseHelpers(testPoint.x, testPoint.y)
const [clickOnSketch2] = scene.makeMouseHelpers(
testPoint.x - 50,
testPoint.y
)
await test.step(`Look for sketch001`, async () => {
await toolbar.closePane('code')
await scene.expectPixelColor([53, 53, 53], testPoint, 15)
})
await test.step(`Go through the command bar flow and fail validation with a toast`, async () => {
await test.step(`Add sweep through the command bar flow`, async () => {
await toolbar.openPane('feature-tree')
await toolbar.sweepButton.click()
await expect
.poll(() => page.getByText('Please select one').count())
.toBe(1)
await cmdBar.expectState({
commandName: 'Sweep',
currentArgKey: 'sketches',
@@ -1987,7 +1974,8 @@ sketch002 = startSketchOn(XZ)
highlightedHeaderArg: 'sketches',
stage: 'arguments',
})
await clickOnSketch1()
await editor.scrollToText(circleCode)
await page.getByText(circleCode).click()
await cmdBar.progressCmdBar()
await cmdBar.expectState({
commandName: 'Sweep',
@@ -2001,11 +1989,39 @@ sketch002 = startSketchOn(XZ)
highlightedHeaderArg: 'path',
stage: 'arguments',
})
await clickOnSketch2()
const helix = await toolbar.getFeatureTreeOperation('Helix', 0)
await helix.click()
await cmdBar.expectState({
commandName: 'Sweep',
currentArgKey: 'path',
currentArgValue: '',
headerArguments: {
Sectional: '',
Sketches: '1 face',
Path: '',
},
highlightedHeaderArg: 'path',
stage: 'arguments',
})
await cmdBar.progressCmdBar()
await expect(
page.getByText('Unable to sweep with the current selection. Reason:')
).toBeVisible()
await cmdBar.expectState({
commandName: 'Sweep',
headerArguments: {
Sketches: '1 face',
Path: '1 helix',
Sectional: '',
},
stage: 'review',
})
await cmdBar.progressCmdBar()
await editor.expectEditor.toContain(sweepDeclaration)
})
await test.step('Delete sweep via feature tree selection', async () => {
const sweep = await toolbar.getFeatureTreeOperation('Sweep', 0)
await sweep.click()
await page.keyboard.press('Delete')
await editor.expectEditor.not.toContain(sweepDeclaration)
})
})
@@ -3627,67 +3643,6 @@ profile001 = startProfile(sketch001, at = [-20, 20])
})
})
test(`Shell dry-run validation rejects sweeps`, async ({
context,
page,
homePage,
scene,
editor,
toolbar,
cmdBar,
}) => {
const initialCode = `sketch001 = startSketchOn(YZ)
|> circle(
center = [0, 0],
radius = 500
)
sketch002 = startSketchOn(XZ)
|> startProfile(at = [0, 0])
|> xLine(length = -2000)
sweep001 = sweep(sketch001, path = sketch002)
`
await context.addInitScript((initialCode) => {
localStorage.setItem('persistCode', initialCode)
}, initialCode)
await page.setBodyDimensions({ width: 1000, height: 500 })
await homePage.goToModelingScene()
await scene.settled(cmdBar)
// One dumb hardcoded screen pixel value
const testPoint = { x: 500, y: 250 }
const [clickOnSweep] = scene.makeMouseHelpers(testPoint.x, testPoint.y)
await test.step(`Confirm sweep exists`, async () => {
await toolbar.closePane('code')
await scene.expectPixelColor([231, 231, 231], testPoint, 15)
})
await test.step(`Go through the Shell flow and fail validation with a toast`, async () => {
await toolbar.shellButton.click()
await expect
.poll(() => page.getByText('Please select one').count())
.toBe(1)
await cmdBar.expectState({
stage: 'arguments',
currentArgKey: 'selection',
currentArgValue: '',
headerArguments: {
Selection: '',
Thickness: '',
},
highlightedHeaderArg: 'selection',
commandName: 'Shell',
})
await clickOnSweep()
await page.waitForTimeout(500)
await cmdBar.progressCmdBar()
await expect(
page.getByText('Unable to shell with the current selection. Reason:')
).toBeVisible()
await page.waitForTimeout(1000)
})
})
test.describe('Revolve point and click workflows', () => {
test('Base case workflow, auto spam continue in command bar', async ({
context,
@@ -4943,4 +4898,34 @@ path001 = startProfile(sketch001, at = [0, 0])
)
})
})
test(`Point and click codemods can't run on KCL errors`, async ({
context,
page,
homePage,
scene,
editor,
toolbar,
cmdBar,
}) => {
const badCode = `sketch001 = startSketchOn(XZ)
profile001 = circle(sketch001, center = [0, 0], radius = 1)
extrude001 = extrude(profile001 length = 1)`
await context.addInitScript((initialCode) => {
localStorage.setItem('persistCode', initialCode)
}, badCode)
await page.setBodyDimensions({ width: 1000, height: 500 })
await homePage.goToModelingScene()
await scene.connectionEstablished()
await test.step(`Start Sketch is disabled`, async () => {
await expect(toolbar.startSketchBtn).not.toBeEnabled()
await editor.expectEditor.toContain(badCode, { shouldNormalise: true })
})
await test.step(`Helix is disabled`, async () => {
await expect(toolbar.helixButton).not.toBeEnabled()
await editor.expectEditor.toContain(badCode, { shouldNormalise: true })
})
})
})

@@ -99,6 +99,8 @@ test.describe('edit with AI example snapshots', () => {
await test.step('fire off edit prompt', async () => {
await cmdBar.captureTextToCadRequestSnapshot(test.info())
await cmdBar.openCmdBar('promptToEdit')
await page.waitForTimeout(100)
await cmdBar.progressCmdBar()
// being specific about the color with a hex means asserting pixel color is more stable
await page
.getByTestId('cmd-bar-arg-value')

@@ -88,6 +88,8 @@ test.describe('Prompt-to-edit tests', () => {
await test.step('fire off edit prompt', async () => {
await cmdBar.openCmdBar('promptToEdit')
await page.waitForTimeout(100)
await cmdBar.progressCmdBar()
// being specific about the color with a hex means asserting pixel color is more stable
await page
.getByTestId('cmd-bar-arg-value')
@@ -165,6 +167,8 @@ test.describe('Prompt-to-edit tests', () => {
await test.step('fire of bad prompt', async () => {
await cmdBar.openCmdBar('promptToEdit')
await page.waitForTimeout(100)
await cmdBar.progressCmdBar()
await page
.getByTestId('cmd-bar-arg-value')
.fill('ansheusha asnthuatshoeuhtaoetuhthaeu laughs in dvorak')

@@ -19,11 +19,12 @@ test.describe('Regression tests', () => {
context,
page,
homePage,
scene,
}) => {
// because the model has `line([0,0]..` it is valid code, but the model is invalid
// regression test for https://github.com/KittyCAD/modeling-app/issues/3251
// Since the bad model also found as issue with the artifact graph, which in tern blocked the editor diognostics
const u = await getUtils(page)
// const u = await getUtils(page)
await context.addInitScript(async () => {
localStorage.setItem(
'persistCode',
@@ -40,7 +41,8 @@ test.describe('Regression tests', () => {
await page.setBodyDimensions({ width: 1000, height: 500 })
await homePage.goToModelingScene()
await u.waitForPageLoad()
await scene.connectionEstablished()
// await u.waitForPageLoad()
// error in guter
await expect(page.locator('.cm-lint-marker-error')).toBeVisible()
@@ -51,8 +53,11 @@ test.describe('Regression tests', () => {
// the close doesn't work
// when https://github.com/KittyCAD/modeling-app/issues/3268 is closed
// this test will need updating
const crypticErrorText = `ApiError`
const crypticErrorText = `Cannot close a path that is non-planar or with duplicate vertices.
Internal engine error on request`
await expect(page.getByText(crypticErrorText).first()).toBeVisible()
// Ensure we didn't nest the json.
await expect(page.getByText('ApiError')).not.toBeVisible()
})
test('user should not have to press down twice in cmdbar', async ({
page,
@@ -185,8 +190,8 @@ extrude001 = extrude(sketch001, length = 50)
page.locator('.pretty-json-container >> text=myVar:"67')
).toBeVisible()
})
test('ProgramMemory can be serialised', async ({ page, homePage }) => {
const u = await getUtils(page)
test('ProgramMemory can be serialised', async ({ page, homePage, scene }) => {
// const u = await getUtils(page)
await page.addInitScript(async () => {
localStorage.setItem(
'persistCode',
@@ -211,11 +216,12 @@ extrude001 = extrude(sketch001, length = 50)
// Listen for all console events and push the message text to an array
page.on('console', (message) => messages.push(message.text()))
await homePage.goToModelingScene()
await u.waitForPageLoad()
// await u.waitForPageLoad()
await scene.connectionEstablished()
// wait for execution done
await u.openDebugPanel()
await u.expectCmdLog('[data-message-type="execution-done"]')
// await u.openDebugPanel()
// await u.expectCmdLog('[data-message-type="execution-done"]')
const forbiddenMessages = ['cannot serialize tagged newtype variant']
forbiddenMessages.forEach((forbiddenMessage) => {
@@ -229,6 +235,7 @@ extrude001 = extrude(sketch001, length = 50)
context,
page,
homePage,
scene,
}) => {
const u = await getUtils(page)
// const PUR = 400 / 37.5 //pixeltoUnitRatio
@@ -247,11 +254,10 @@ extrude001 = extrude(sketch001, length = 50)
shell(exampleSketch, faces = ['end'], thickness = 0.25)`
)
})
await homePage.goToModelingScene()
await scene.connectionEstablished()
await expect(async () => {
await homePage.goToModelingScene()
await u.waitForPageLoad()
// error in guter
await expect(page.locator('.cm-lint-marker-error')).toBeVisible({
timeout: 1_000,
@@ -452,12 +458,10 @@ extrude002 = extrude(profile002, length = 150)
// Click the stl.
await expect(stlOption).toBeVisible()
await page.keyboard.press('Enter')
// Click the checkbox
await expect(submitButton).toBeVisible()
await page.keyboard.press('Enter')
// Find the toast.
@@ -465,11 +469,13 @@ extrude002 = extrude(profile002, length = 150)
await expect(exportingToastMessage).toBeVisible()
// Expect it to succeed.
await expect(exportingToastMessage).not.toBeVisible({ timeout: 15_000 })
await expect(exportingToastMessage).not.toBeVisible()
await expect(engineErrorToastMessage).not.toBeVisible()
const successToastMessage = page.getByText(`Exported successfully`)
await expect(successToastMessage).toBeVisible()
await page.waitForTimeout(1_000)
const count = await successToastMessage.count()
await expect(count).toBeGreaterThanOrEqual(1)
}
)
// We updated this test such that you can have multiple exports going at once.
@ -1365,18 +1365,18 @@ solid001 = subtract([extrude001], tools = [extrude002])
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
'persistCode',
|
||||
`fn in2mm = (inches) => {
|
||||
`fn in2mm(@inches) {
|
||||
return inches * 25.4
|
||||
}
|
||||
|
||||
const railTop = in2mm(.748)
|
||||
const railSide = in2mm(.024)
|
||||
const railBaseWidth = in2mm(.612)
|
||||
const railWideWidth = in2mm(.835)
|
||||
const railBaseLength = in2mm(.200)
|
||||
const railClampable = in2mm(.200)
|
||||
railTop = in2mm(.748)
|
||||
railSide = in2mm(.024)
|
||||
railBaseWidth = in2mm(.612)
|
||||
railWideWidth = in2mm(.835)
|
||||
railBaseLength = in2mm(.200)
|
||||
railClampable = in2mm(.200)
|
||||
|
||||
const rail = startSketchOn(XZ)
|
||||
rail = startSketchOn(XZ)
|
||||
|> startProfile(at = [-railTop / 2, railClampable + railBaseLength])
|
||||
|> line(endAbsolute = [
|
||||
railTop / 2,
|
||||
@ -3540,7 +3540,6 @@ profile001 = startProfile(sketch001, at = [127.56, 179.02])
|
||||
|
||||
await homePage.openProject('multi-file-sketch-test')
|
||||
await scene.connectionEstablished()
|
||||
await scene.settled(cmdBar)
|
||||
|
||||
await u.closeDebugPanel()
|
||||
|
||||
@ -3555,9 +3554,6 @@ profile001 = startProfile(sketch001, at = [127.56, 179.02])
|
||||
|
||||
await toolbar.openFile('error.kcl')
|
||||
|
||||
// Ensure filetree is populated
|
||||
await scene.settled(cmdBar)
|
||||
|
||||
await expect(
|
||||
toolbar.featureTreePane.getByRole('button', { name: 'Sketch' })
|
||||
).toHaveCount(0)
|
||||
|
@ -1,4 +1,3 @@
|
||||
import { KCL_DEFAULT_LENGTH } from '@src/lib/constants'
|
||||
import type { CmdBarFixture } from '@e2e/playwright/fixtures/cmdBarFixture'
|
||||
import type { SceneFixture } from '@e2e/playwright/fixtures/sceneFixture'
|
||||
import { TEST_SETTINGS, TEST_SETTINGS_KEY } from '@e2e/playwright/storageStates'
|
||||
@ -9,6 +8,7 @@ import {
|
||||
settingsToToml,
|
||||
} from '@e2e/playwright/test-utils'
|
||||
import { expect, test } from '@e2e/playwright/zoo-test'
|
||||
import { KCL_DEFAULT_LENGTH } from '@src/lib/constants'
|
||||
|
||||
test.beforeEach(async ({ page, context }) => {
|
||||
// Make the user avatar image always 404
|
||||
@ -766,7 +766,7 @@ test.describe('Grid visibility', { tag: '@snapshot' }, () => {
|
||||
})
|
||||
})
|
||||
|
||||
test('theme persists', async ({ page, context }) => {
|
||||
test('theme persists', async ({ page, context, homePage }) => {
|
||||
const u = await getUtils(page)
|
||||
await context.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
@ -784,7 +784,7 @@ test('theme persists', async ({ page, context }) => {
|
||||
|
||||
await page.setViewportSize({ width: 1200, height: 500 })
|
||||
|
||||
await u.waitForAuthSkipAppStart()
|
||||
await homePage.goToModelingScene()
|
||||
await page.waitForTimeout(500)
|
||||
|
||||
// await page.getByRole('link', { name: 'Settings Settings (tooltip)' }).click()
|
||||
@ -812,7 +812,7 @@ test('theme persists', async ({ page, context }) => {
|
||||
// Disconnect and reconnect to check the theme persists through a reload
|
||||
|
||||
// Expect the network to be down
|
||||
await expect(networkToggle).toContainText('Offline')
|
||||
await expect(networkToggle).toContainText('Problem')
|
||||
|
||||
// simulate network up
|
||||
await u.emulateNetworkConditions({
|
||||
@ -873,6 +873,50 @@ sweepSketch = startSketchOn(XY)
|
||||
mask: lowerRightMasks(page),
|
||||
})
|
||||
})
|
||||
test('code color goober works with single quotes', async ({
|
||||
page,
|
||||
context,
|
||||
scene,
|
||||
cmdBar,
|
||||
}) => {
|
||||
const u = await getUtils(page)
|
||||
await context.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
'persistCode',
|
||||
`// Create a pipe using a sweep.
|
||||
|
||||
// Create a path for the sweep.
|
||||
sweepPath = startSketchOn(XZ)
|
||||
|> startProfile(at = [0.05, 0.05])
|
||||
|> line(end = [0, 7])
|
||||
|> tangentialArc(angle = 90, radius = 5)
|
||||
|> line(end = [-3, 0])
|
||||
|> tangentialArc(angle = -90, radius = 5)
|
||||
|> line(end = [0, 7])
|
||||
|
||||
sweepSketch = startSketchOn(XY)
|
||||
|> startProfile(at = [2, 0])
|
||||
|> arc(angleStart = 0, angleEnd = 360, radius = 2)
|
||||
|> sweep(path = sweepPath)
|
||||
|> appearance(
|
||||
color = '#bb00ff',
|
||||
metalness = 90,
|
||||
roughness = 90
|
||||
)
|
||||
`
|
||||
)
|
||||
})
|
||||
|
||||
await page.setViewportSize({ width: 1200, height: 1000 })
|
||||
await u.waitForAuthSkipAppStart()
|
||||
|
||||
await scene.settled(cmdBar)
|
||||
|
||||
await expect(page, 'expect small color widget').toHaveScreenshot({
|
||||
maxDiffPixels: 100,
|
||||
mask: lowerRightMasks(page),
|
||||
})
|
||||
})
|
||||
|
||||
test('code color goober opening window', async ({
|
||||
page,
|
||||
|
Snapshot images: 21 updated, 2 added (43–132 KiB each); binary contents not shown.
@ -1,19 +1,22 @@
|
||||
import type { EngineCommand } from '@src/lang/std/artifactGraph'
|
||||
import { uuidv4 } from '@src/lib/utils'
|
||||
|
||||
import { commonPoints, getUtils } from '@e2e/playwright/test-utils'
|
||||
import {
|
||||
commonPoints,
|
||||
getUtils,
|
||||
TEST_COLORS,
|
||||
circleMove,
|
||||
} from '@e2e/playwright/test-utils'
|
||||
import { expect, test } from '@e2e/playwright/zoo-test'
|
||||
|
||||
test.describe(
|
||||
'Test network and connection issues',
|
||||
{
|
||||
tag: ['@macos', '@windows'],
|
||||
},
|
||||
() => {
|
||||
test.describe('Test network related behaviors', () => {
|
||||
test(
|
||||
'simulate network down and network little widget',
|
||||
{ tag: '@skipLocalEngine' },
|
||||
async ({ page, homePage }) => {
|
||||
const networkToggleConnectedText = page.getByText('Connected')
|
||||
const networkToggleWeakText = page.getByText('Network health (Weak)')
|
||||
|
||||
const u = await getUtils(page)
|
||||
await page.setBodyDimensions({ width: 1200, height: 500 })
|
||||
|
||||
@ -34,7 +37,9 @@ test.describe(
|
||||
await expect(networkPopover).not.toBeVisible()
|
||||
|
||||
// (First check) Expect the network to be up
|
||||
await expect(networkToggle).toContainText('Connected')
|
||||
await expect(
|
||||
networkToggleConnectedText.or(networkToggleWeakText)
|
||||
).toBeVisible()
|
||||
|
||||
// Click the network widget
|
||||
await networkWidget.click()
|
||||
@ -82,7 +87,9 @@ test.describe(
|
||||
).not.toBeDisabled({ timeout: 15000 })
|
||||
|
||||
// (Second check) expect the network to be up
|
||||
await expect(networkToggle).toContainText('Connected')
|
||||
await expect(
|
||||
networkToggleConnectedText.or(networkToggleWeakText)
|
||||
).toBeVisible()
|
||||
}
|
||||
)
|
||||
|
||||
@ -91,6 +98,8 @@ test.describe(
|
||||
{ tag: '@skipLocalEngine' },
|
||||
async ({ page, homePage, toolbar, scene, cmdBar }) => {
|
||||
const networkToggle = page.getByTestId('network-toggle')
|
||||
const networkToggleConnectedText = page.getByText('Connected')
|
||||
const networkToggleWeakText = page.getByText('Network health (Weak)')
|
||||
|
||||
const u = await getUtils(page)
|
||||
await page.setBodyDimensions({ width: 1200, height: 500 })
|
||||
@ -109,7 +118,7 @@ test.describe(
|
||||
await page.mouse.click(700, 200)
|
||||
|
||||
await expect(page.locator('.cm-content')).toHaveText(
|
||||
`sketch001 = startSketchOn(XZ)`
|
||||
`@settings(defaultLengthUnit = in)sketch001 = startSketchOn(XZ)`
|
||||
)
|
||||
await u.closeDebugPanel()
|
||||
|
||||
@ -118,7 +127,7 @@ test.describe(
|
||||
const startXPx = 600
|
||||
await page.mouse.click(startXPx + PUR * 10, 500 - PUR * 10)
|
||||
await expect(page.locator('.cm-content')).toHaveText(
|
||||
`sketch001 = startSketchOn(XZ)profile001 = startProfile(sketch001, at = ${commonPoints.startAt})`
|
||||
`@settings(defaultLengthUnit = in)sketch001 = startSketchOn(XZ)profile001 = startProfile(sketch001, at = ${commonPoints.startAt})`
|
||||
)
|
||||
await page.waitForTimeout(100)
|
||||
|
||||
@ -127,11 +136,14 @@ test.describe(
|
||||
|
||||
await expect(
|
||||
page.locator('.cm-content')
|
||||
).toHaveText(`sketch001 = startSketchOn(XZ)profile001 = startProfile(sketch001, at = ${commonPoints.startAt})
|
||||
).toHaveText(`@settings(defaultLengthUnit = in)sketch001 = startSketchOn(XZ)profile001 = startProfile(sketch001, at = ${commonPoints.startAt})
|
||||
|> xLine(length = ${commonPoints.num1})`)
|
||||
|
||||
// Expect the network to be up
|
||||
await expect(networkToggle).toContainText('Connected')
|
||||
await networkToggle.hover()
|
||||
await expect(
|
||||
networkToggleConnectedText.or(networkToggleWeakText)
|
||||
).toBeVisible()
|
||||
|
||||
// simulate network down
|
||||
await u.emulateNetworkConditions({
|
||||
@ -143,6 +155,7 @@ test.describe(
|
||||
})
|
||||
|
||||
// Expect the network to be down
|
||||
await networkToggle.hover()
|
||||
await expect(networkToggle).toContainText('Problem')
|
||||
|
||||
// Ensure we are not in sketch mode
|
||||
@ -168,7 +181,11 @@ test.describe(
|
||||
).not.toBeDisabled({ timeout: 15000 })
|
||||
|
||||
// Expect the network to be up
|
||||
await expect(networkToggle).toContainText('Connected')
|
||||
await networkToggle.hover()
|
||||
await expect(
|
||||
networkToggleConnectedText.or(networkToggleWeakText)
|
||||
).toBeVisible()
|
||||
|
||||
await scene.settled(cmdBar)
|
||||
|
||||
// Click off the code pane.
|
||||
@ -183,9 +200,7 @@ test.describe(
|
||||
await toolbar.editSketch()
|
||||
|
||||
// Click the line tool
|
||||
await page
|
||||
.getByRole('button', { name: 'line Line', exact: true })
|
||||
.click()
|
||||
await page.getByRole('button', { name: 'line Line', exact: true }).click()
|
||||
|
||||
await page.waitForTimeout(150)
|
||||
|
||||
@ -219,7 +234,10 @@ test.describe(
|
||||
await page.mouse.click(startXPx + PUR * 20, 500 - PUR * 20)
|
||||
await expect
|
||||
.poll(u.normalisedEditorCode)
|
||||
.toBe(`sketch001 = startSketchOn(XZ)
|
||||
.toBe(`@settings(defaultLengthUnit = in)
|
||||
|
||||
|
||||
sketch001 = startSketchOn(XZ)
|
||||
profile001 = startProfile(sketch001, at = [12.34, -12.34])
|
||||
|> xLine(length = 12.34)
|
||||
|> line(end = [-12.34, 12.34])
|
||||
@ -230,7 +248,10 @@ profile001 = startProfile(sketch001, at = [12.34, -12.34])
|
||||
|
||||
await expect
|
||||
.poll(u.normalisedEditorCode)
|
||||
.toBe(`sketch001 = startSketchOn(XZ)
|
||||
.toBe(`@settings(defaultLengthUnit = in)
|
||||
|
||||
|
||||
sketch001 = startSketchOn(XZ)
|
||||
profile001 = startProfile(sketch001, at = [12.34, -12.34])
|
||||
|> xLine(length = 12.34)
|
||||
|> line(end = [-12.34, 12.34])
|
||||
@ -255,5 +276,88 @@ profile001 = startProfile(sketch001, at = [12.34, -12.34])
|
||||
).not.toBeVisible()
|
||||
}
|
||||
)
|
||||
|
||||
test(
|
||||
'Paused stream freezes view frame, unpause reconnect is seamless to user',
|
||||
{ tag: ['@electron', '@skipLocalEngine'] },
|
||||
async ({ page, homePage, scene, cmdBar, toolbar, tronApp }) => {
|
||||
const networkToggle = page.getByTestId('network-toggle')
|
||||
const networkToggleConnectedText = page.getByText('Connected')
|
||||
const networkToggleWeakText = page.getByText('Network health (Weak)')
|
||||
|
||||
if (!tronApp) {
|
||||
fail()
|
||||
}
|
||||
|
||||
await tronApp.cleanProjectDir({
|
||||
app: {
|
||||
stream_idle_mode: 5000,
|
||||
},
|
||||
})
|
||||
|
||||
await page.addInitScript(async () => {
|
||||
localStorage.setItem(
|
||||
'persistCode',
|
||||
`sketch001 = startSketchOn(XY)
|
||||
profile001 = startProfile(sketch001, at = [0.0, 0.0])
|
||||
|> line(end = [10.0, 0])
|
||||
|> line(end = [0, 10.0])
|
||||
|> close()`
|
||||
)
|
||||
})
|
||||
|
||||
const dim = { width: 1200, height: 500 }
|
||||
await page.setBodyDimensions(dim)
|
||||
|
||||
await test.step('Go to modeling scene', async () => {
|
||||
await homePage.goToModelingScene()
|
||||
await scene.settled(cmdBar)
|
||||
})
|
||||
|
||||
await test.step('Verify pausing behavior', async () => {
|
||||
// Wait 5s + 1s to pause.
|
||||
await page.waitForTimeout(6000)
|
||||
|
||||
// We should now be paused. To the user, it should appear we're still
|
||||
// connected.
|
||||
await networkToggle.hover()
|
||||
await expect(
|
||||
networkToggleConnectedText.or(networkToggleWeakText)
|
||||
).toBeVisible()
|
||||
|
||||
const center = {
|
||||
x: dim.width / 2,
|
||||
y: dim.height / 2,
|
||||
}
|
||||
|
||||
let probe = { x: 0, y: 0 }
|
||||
|
||||
// ... and the model's still visibly there
|
||||
probe.x = center.x + dim.width / 100
|
||||
probe.y = center.y
|
||||
await scene.expectPixelColor(TEST_COLORS.GREY, probe, 15)
|
||||
probe = { ...center }
|
||||
|
||||
// Now move the mouse around to unpause!
|
||||
await circleMove(page, probe.x, probe.y, 20, 10)
|
||||
|
||||
// ONCE AGAIN! Check the view area hasn't changed at all.
|
||||
// Check the pixel a couple times as it reconnects.
|
||||
// NOTE: Remember, idle behavior is still on at this point -
|
||||
// if this test takes longer than 5s shit WILL go south!
|
||||
probe.x = center.x + dim.width / 100
|
||||
probe.y = center.y
|
||||
await scene.expectPixelColor(TEST_COLORS.GREY, probe, 15)
|
||||
await page.waitForTimeout(1000)
|
||||
await scene.expectPixelColor(TEST_COLORS.GREY, probe, 15)
|
||||
probe = { ...center }
|
||||
|
||||
// Ensure we're still connected
|
||||
await networkToggle.hover()
|
||||
await expect(
|
||||
networkToggleConnectedText.or(networkToggleWeakText)
|
||||
).toBeVisible()
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
|
@ -44,6 +44,8 @@ export const lowerRightMasks = (page: Page) => [
export type TestColor = [number, number, number]
export const TEST_COLORS: { [key: string]: TestColor } = {
WHITE: [249, 249, 249],
OFFWHITE: [237, 237, 237],
GREY: [142, 142, 142],
YELLOW: [255, 255, 0],
BLUE: [0, 0, 255],
DARK_MODE_BKGD: [27, 27, 27],
@ -549,11 +551,6 @@ export async function getUtils(page: Page, test_?: typeof test) {

createNewFile: async (name: string) => {
return test?.step(`Create a file named ${name}`, async () => {
// If the application is in the middle of connecting a stream
// then creating a new file won't work in the end.
await expect(
page.getByRole('button', { name: 'Start Sketch' })
).not.toBeDisabled()
await page.getByTestId('create-file-button').click()
await page.getByTestId('tree-input-field').fill(name)
await page.keyboard.press('Enter')
@ -94,7 +94,6 @@
|
||||
"build:wasm:dev": "./scripts/build-wasm-dev.sh",
|
||||
"build:wasm:dev:windows": "powershell -ExecutionPolicy Bypass -File ./scripts/build-wasm-dev.ps1",
|
||||
"pretest": "npm run remove-importmeta",
|
||||
"test:rust": "(cd rust && just test && just lint)",
|
||||
"simpleserver": "npm run pretest && http-server ./public --cors -p 3000",
|
||||
"simpleserver:ci": "npm run pretest && http-server ./public --cors -p 3000 &",
|
||||
"simpleserver:bg": "npm run pretest && http-server ./public --cors -p 3000 &",
|
||||
@ -130,15 +129,14 @@
|
||||
"tronb:package:prod": "npm run tronb:vite:prod && electron-builder --config electron-builder.yml --publish always",
|
||||
"test-setup": "npm install && npm run build:wasm",
|
||||
"test": "vitest --mode development",
|
||||
"test:rust": "(cd rust && just test && just lint)",
|
||||
"test:snapshots": "PLATFORM=web NODE_ENV=development playwright test --config=playwright.config.ts --grep=@snapshot --trace=on --shard=1/1",
|
||||
"test:unit": "vitest run --mode development --exclude **/kclSamples.test.ts --exclude **/jest-component-unit-tests/*",
|
||||
"test:unit": "vitest run --mode development --exclude **/jest-component-unit-tests/*",
|
||||
"test:unit:components": "jest -c jest-component-unit-tests/jest.config.ts --rootDir jest-component-unit-tests/",
|
||||
"test:unit:kcl-samples": "vitest run --mode development ./src/lang/kclSamples.test.ts",
|
||||
"test:playwright:electron": "playwright test --config=playwright.electron.config.ts --grep-invert=@snapshot",
|
||||
"test:playwright:electron:local": "npm run tronb:vite:dev && playwright test --config=playwright.electron.config.ts --grep-invert=@snapshot --grep-invert=\"$(curl --silent https://test-analysis-bot.hawk-dinosaur.ts.net/projects/KittyCAD/modeling-app/tests/disabled/regex)\"",
|
||||
"test:playwright:electron:local-engine": "npm run tronb:vite:dev && playwright test --config=playwright.electron.config.ts --grep-invert='@snapshot|@skipLocalEngine' --grep-invert=\"$(curl --silent https://test-analysis-bot.hawk-dinosaur.ts.net/projects/KittyCAD/modeling-app/tests/disabled/regex)\"",
|
||||
"test:unit:local": "npm run simpleserver:bg && npm run test:unit; kill-port 3000",
|
||||
"test:unit:kcl-samples:local": "npm run simpleserver:bg && npm run test:unit:kcl-samples; kill-port 3000"
|
||||
"test:unit:local": "npm run simpleserver:bg && npm run test:unit; kill-port 3000"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
|
@ -57,9 +57,9 @@ fn connectorSketch(@plane, start) {
|
||||
|
||||
export fn connector(@plane, length) {
|
||||
connectorSketch(plane, start = [-12, 8])
|
||||
|> extrude(length = length)
|
||||
|> extrude(length)
|
||||
connectorSketch(plane, start = [16, 8])
|
||||
|> extrude(length = length)
|
||||
|> extrude(length)
|
||||
return 0
|
||||
}
|
||||
|
||||
@ -79,7 +79,7 @@ fn seatSlatSketch(@plane) {
|
||||
|
||||
export fn seatSlats(@plane, length) {
|
||||
seatSlatSketch(plane)
|
||||
|> extrude(length = length)
|
||||
|> extrude(length)
|
||||
return 0
|
||||
}
|
||||
|
||||
@ -99,7 +99,7 @@ fn backSlatsSketch(@plane) {
|
||||
|
||||
export fn backSlats(@plane, length) {
|
||||
b = backSlatsSketch(plane)
|
||||
|> extrude(length = length)
|
||||
|> extrude(length)
|
||||
return b
|
||||
}
|
||||
|
||||
|
@ -15,7 +15,7 @@ holeDia = 4
|
||||
sketch001 = startSketchOn(XY)
|
||||
|> startProfile(at = [0, 0])
|
||||
|> angledLine(angle = 0, length = width, tag = $rectangleSegmentA001)
|
||||
|> angledLine(angle = segAng(rectangleSegmentA001) + 90, length = length, tag = $rectangleSegmentB001)
|
||||
|> angledLine(angle = segAng(rectangleSegmentA001) + 90, length, tag = $rectangleSegmentB001)
|
||||
|> angledLine(angle = segAng(rectangleSegmentA001), length = -segLen(rectangleSegmentA001), tag = $rectangleSegmentC001)
|
||||
|> line(endAbsolute = [profileStartX(%), profileStartY(%)], tag = $rectangleSegmentD001)
|
||||
|> close()
|
||||
@ -74,7 +74,7 @@ function001([
|
||||
sketch003 = startSketchOn(XY)
|
||||
|> startProfile(at = [width * 1.2, 0])
|
||||
|> angledLine(angle = 0, length = width, tag = $rectangleSegmentA002)
|
||||
|> angledLine(angle = segAng(rectangleSegmentA001) + 90, length = length, tag = $rectangleSegmentB002)
|
||||
|> angledLine(angle = segAng(rectangleSegmentA001) + 90, length, tag = $rectangleSegmentB002)
|
||||
|> angledLine(angle = segAng(rectangleSegmentA001), length = -segLen(rectangleSegmentA001), tag = $rectangleSegmentC002)
|
||||
|> line(endAbsolute = [profileStartX(%), profileStartY(%)], tag = $rectangleSegmentD002)
|
||||
|> close()
|
||||
|
@ -1,12 +1,12 @@
[test-groups]
# If a test uses the engine, we want to limit the number that can run in parallel.
# This way we don't start and stop too many engine instances, putting pressure on our cloud.
uses-engine = { max-threads = 4 }
uses-engine = { max-threads = 32 }
# If a test must run after the engine tests, we want to make sure the engine tests are done first.
after-engine = { max-threads = 12 }
after-engine = { max-threads = 32 }

[profile.default]
slow-timeout = { period = "180s", terminate-after = 1 }
slow-timeout = { period = "280s", terminate-after = 1 }

[profile.ci]
slow-timeout = { period = "280s", terminate-after = 5 }
127
rust/Cargo.lock
generated
@ -1815,7 +1815,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-bumper"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@ -1826,7 +1826,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-derive-docs"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"anyhow",
|
||||
@ -1845,8 +1845,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-directory-test-macro"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"convert_case",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.100",
|
||||
@ -1854,7 +1855,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-language-server"
|
||||
version = "0.2.69"
|
||||
version = "0.2.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@ -1875,7 +1876,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-language-server-release"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@ -1895,7 +1896,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-lib"
|
||||
version = "0.2.69"
|
||||
version = "0.2.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"approx 0.5.1",
|
||||
@ -1934,6 +1935,7 @@ dependencies = [
|
||||
"measurements",
|
||||
"miette",
|
||||
"mime_guess",
|
||||
"nalgebra-glm",
|
||||
"parse-display 0.10.0",
|
||||
"pretty_assertions",
|
||||
"pyo3",
|
||||
@ -1971,7 +1973,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-python-bindings"
|
||||
version = "0.3.69"
|
||||
version = "0.3.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"kcl-lib",
|
||||
@ -1986,7 +1988,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-test-server"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"hyper 0.14.32",
|
||||
@ -1999,7 +2001,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-to-core"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
@ -2013,7 +2015,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-wasm-lib"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bson",
|
||||
@ -2080,9 +2082,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kittycad-modeling-cmds"
|
||||
version = "0.2.120"
|
||||
version = "0.2.121"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "48b71e06ee5d711d0085864a756fb6a304531246689ea00c6ef5d740670c3701"
|
||||
checksum = "94ba95c22493d79ec8a1faab963d8903f6de0e373efedf2bc3bb76a0ddbab036"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
@ -2253,6 +2255,16 @@ dependencies = [
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matrixmultiply"
|
||||
version = "0.3.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a06de3016e9fae57a36fd14dba131fccf49f74b40b7fbdb472f96e361ec71a08"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"rawpointer",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "measurements"
|
||||
version = "0.11.0"
|
||||
@ -2373,6 +2385,33 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nalgebra"
|
||||
version = "0.33.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26aecdf64b707efd1310e3544d709c5c0ac61c13756046aaaba41be5c4f66a3b"
|
||||
dependencies = [
|
||||
"approx 0.5.1",
|
||||
"matrixmultiply",
|
||||
"num-complex",
|
||||
"num-rational",
|
||||
"num-traits 0.2.19",
|
||||
"simba",
|
||||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nalgebra-glm"
|
||||
version = "0.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e441f43bccdf40cb6bd4294321e6983c5bc7b9886112d19fd4c9813976b117e4"
|
||||
dependencies = [
|
||||
"approx 0.5.1",
|
||||
"nalgebra",
|
||||
"num-traits 0.2.19",
|
||||
"simba",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "newline-converter"
|
||||
version = "0.3.0"
|
||||
@ -2412,6 +2451,15 @@ dependencies = [
|
||||
"num-traits 0.2.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-complex"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
|
||||
dependencies = [
|
||||
"num-traits 0.2.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-conv"
|
||||
version = "0.1.0"
|
||||
@ -2442,6 +2490,17 @@ dependencies = [
|
||||
"num-modular",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-rational"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"num-integer",
|
||||
"num-traits 0.2.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.1.43"
|
||||
@ -2595,6 +2654,12 @@ dependencies = [
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
|
||||
|
||||
[[package]]
|
||||
name = "pbkdf2"
|
||||
version = "0.12.2"
|
||||
@ -3093,6 +3158,12 @@ dependencies = [
|
||||
"getrandom 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rawpointer"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.10.0"
|
||||
@ -3376,6 +3447,15 @@ version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "safe_arch"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96b02de82ddbe1b636e6170c21be622223aea188ef2e139be0a5b219ec215323"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
version = "1.0.6"
|
||||
@ -3631,6 +3711,19 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simba"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b3a386a501cd104797982c15ae17aafe8b9261315b5d07e3ec803f2ea26be0fa"
|
||||
dependencies = [
|
||||
"approx 0.5.1",
|
||||
"num-complex",
|
||||
"num-traits 0.2.19",
|
||||
"paste",
|
||||
"wide",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simd-adler32"
|
||||
version = "0.3.7"
|
||||
@ -4731,6 +4824,16 @@ dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wide"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41b5576b9a81633f3e8df296ce0063042a73507636cbe956c61133dd7034ab22"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
"safe_arch",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
|
@ -1,7 +1,7 @@
|
||||
|
||||
[package]
|
||||
name = "kcl-bumper"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
edition = "2021"
|
||||
repository = "https://github.com/KittyCAD/modeling-api"
|
||||
rust-version = "1.76"
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "kcl-derive-docs"
|
||||
description = "A tool for generating documentation from Rust derive macros"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/KittyCAD/modeling-app"
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "kcl-directory-test-macro"
|
||||
description = "A tool for generating tests from a directory of kcl files"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/KittyCAD/modeling-app"
|
||||
@ -11,6 +11,7 @@ proc-macro = true
|
||||
bench = false
|
||||
|
||||
[dependencies]
|
||||
convert_case = "0.8.0"
|
||||
proc-macro2 = "1"
|
||||
quote = "1"
|
||||
syn = { version = "2.0.96", features = ["full"] }
|
||||
|
@ -1,10 +1,13 @@
|
||||
use std::fs;
|
||||
|
||||
use convert_case::Casing;
|
||||
use proc_macro::TokenStream;
|
||||
use quote::{format_ident, quote};
|
||||
use syn::{parse_macro_input, LitStr};
|
||||
|
||||
/// A macro that generates test functions for each directory within a given path.
|
||||
/// To be included the test directory must have a main.kcl file.
|
||||
/// This will also recursively search for directories within the given path.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
@ -45,7 +48,11 @@ pub fn test_all_dirs(attr: TokenStream, item: TokenStream) -> TokenStream {
|
||||
|
||||
// Generate a test function for each directory
|
||||
let test_fns = dirs.iter().map(|(dir_name, dir_path)| {
|
||||
let test_fn_name = format_ident!("{}_{}", fn_name, sanitize_dir_name(dir_name));
|
||||
let relative_path = dir_path
|
||||
.strip_prefix(&path.to_string_lossy().to_string())
|
||||
.unwrap()
|
||||
.trim();
|
||||
let test_fn_name = format_ident!("{}_{}", fn_name, sanitize_dir_name(relative_path));
|
||||
let dir_name_str = dir_name.clone();
|
||||
let dir_path_str = dir_path.clone();
|
||||
|
||||
@ -75,16 +82,26 @@ fn get_all_directories(path: &std::path::Path) -> Result<Vec<(String, String)>,
|
||||
|
||||
for entry in fs::read_dir(path)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
let new_path = entry.path();
|
||||
|
||||
if path.is_dir() && !IGNORE_DIRS.contains(&path.file_name().and_then(|name| name.to_str()).unwrap_or("")) {
|
||||
let dir_name = path
|
||||
if new_path.is_dir()
|
||||
&& !IGNORE_DIRS.contains(&new_path.file_name().and_then(|name| name.to_str()).unwrap_or(""))
|
||||
{
|
||||
// Check if the directory contains a main.kcl file.
|
||||
let main_kcl_path = new_path.join("main.kcl");
|
||||
if !main_kcl_path.exists() {
|
||||
// Recurse into the directory.
|
||||
let sub_dirs = get_all_directories(&new_path)?;
|
||||
dirs.extend(sub_dirs);
|
||||
continue;
|
||||
}
|
||||
let dir_name = new_path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
|
||||
let dir_path = path.to_str().unwrap_or("unknown").to_string();
|
||||
let dir_path = new_path.to_str().unwrap_or("unknown").to_string();
|
||||
|
||||
dirs.push((dir_name, dir_path));
|
||||
}
|
||||
@ -95,10 +112,9 @@ fn get_all_directories(path: &std::path::Path) -> Result<Vec<(String, String)>,

/// Sanitize directory name to create a valid Rust identifier
fn sanitize_dir_name(name: &str) -> String {
let name = name.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "_");
if name.chars().next().is_some_and(|c| c.is_numeric()) {
format!("d_{}", name)
} else {
name
}
let binding = name
.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "_")
.replace("/", "_");
let name = binding.trim_start_matches('_').to_string();
name.to_case(convert_case::Case::Snake)
}
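The reworked sanitizer above also folds path separators into underscores, strips leading underscores, and snake_cases the result. A minimal standalone sketch of that behavior, assuming convert_case 0.8 as declared in the Cargo.toml hunk above; the sample input is illustrative, not taken from the repository:

// Sketch of the sanitizer's behavior; not the repository's code verbatim.
use convert_case::{Case, Casing};

fn sanitize_dir_name(name: &str) -> String {
    let underscored = name
        .replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "_")
        .replace("/", "_");
    underscored.trim_start_matches('_').to_case(Case::Snake)
}

fn main() {
    // A nested directory like "kcl_samples/gear-rack" would come out roughly
    // as "kcl_samples_gear_rack", which is a valid Rust test identifier.
    println!("{}", sanitize_dir_name("kcl_samples/gear-rack"));
}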
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "kcl-language-server-release"
|
||||
version = "0.1.69"
|
||||
version = "0.1.74"
|
||||
edition = "2021"
|
||||
authors = ["KittyCAD Inc <kcl@kittycad.io>"]
|
||||
publish = false
|
||||
|
@ -2,7 +2,7 @@
|
||||
name = "kcl-language-server"
|
||||
description = "A language server for KCL."
|
||||
authors = ["KittyCAD Inc <kcl@kittycad.io>"]
|
||||
version = "0.2.69"
|
||||
version = "0.2.74"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "kcl-lib"
|
||||
description = "KittyCAD Language implementation and tools"
|
||||
version = "0.2.69"
|
||||
version = "0.2.74"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/KittyCAD/modeling-app"
|
||||
@ -50,6 +50,7 @@ lazy_static = { workspace = true }
|
||||
measurements = "0.11.0"
|
||||
miette = { workspace = true }
|
||||
mime_guess = "2.0.5"
|
||||
nalgebra-glm = "0.19.0"
|
||||
parse-display = "0.10.0"
|
||||
pyo3 = { workspace = true, optional = true }
|
||||
regex = "1.11.1"
|
||||
|
@ -1,5 +1,7 @@
|
||||
//! Cache testing framework.
|
||||
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
use kcl_lib::NodePathStep;
|
||||
use kcl_lib::{bust_cache, ExecError, ExecOutcome};
|
||||
use kcmc::{each_cmd as mcmd, ModelingCmd};
|
||||
use kittycad_modeling_cmds as kcmc;
|
||||
@ -259,18 +261,23 @@ extrude(profile001, length = 100)"#
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_cache_add_line_preserves_artifact_commands() {
|
||||
let code = r#"sketch001 = startSketchOn(XY)
|
||||
|> startProfile(at = [5.5229, 5.25217])
|
||||
|> line(end = [10.50433, -1.19122])
|
||||
|> line(end = [8.01362, -5.48731])
|
||||
|> line(end = [-1.02877, -6.76825])
|
||||
|> line(end = [-11.53311, 2.81559])
|
||||
profile001 = startProfile(sketch001, at = [5.5, 5.25])
|
||||
|> line(end = [10.5, -1.19])
|
||||
|> line(end = [8, -5.5])
|
||||
|> line(end = [-1.02, -6.76])
|
||||
|> line(end = [-11.5, 2.8])
|
||||
|> close()
|
||||
plane001 = offsetPlane(XY, offset = 20)
|
||||
"#;
|
||||
// Use a new statement; don't extend the prior pipeline. This allows us to
|
||||
// detect a prefix.
|
||||
let code_with_extrude = code.to_owned()
|
||||
+ r#"
|
||||
extrude(sketch001, length = 4)
|
||||
profile002 = startProfile(plane001, at = [0, 0])
|
||||
|> line(end = [0, 10])
|
||||
|> line(end = [10, 0])
|
||||
|> close()
|
||||
extrude001 = extrude(profile001, length = 4)
|
||||
"#;
|
||||
|
||||
let result = cache_test(
|
||||
@ -305,6 +312,58 @@ extrude(sketch001, length = 4)
|
||||
first.artifact_graph.len(),
|
||||
second.artifact_graph.len()
|
||||
);
|
||||
// Make sure we have NodePaths referring to the old code.
|
||||
let graph = &second.artifact_graph;
|
||||
assert!(!graph.is_empty());
|
||||
for artifact in graph.values() {
|
||||
assert!(!artifact.code_ref().map(|c| c.node_path.is_empty()).unwrap_or(false));
|
||||
assert!(
|
||||
!artifact
|
||||
.face_code_ref()
|
||||
// TODO: This fails, but it shouldn't.
|
||||
// .map(|c| c.node_path.is_empty())
|
||||
// Allowing the NodePath to be empty if the SourceRange is [0,
|
||||
// 0] as a more lenient check.
|
||||
.map(|c| !c.range.is_synthetic() && c.node_path.is_empty())
|
||||
.unwrap_or(false),
|
||||
"artifact={:?}",
|
||||
artifact
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_cache_add_offset_plane_computes_node_path() {
|
||||
let code = r#"sketch001 = startSketchOn(XY)
|
||||
profile001 = startProfile(sketch001, at = [0, 0])
|
||||
"#;
|
||||
let code_with_more = code.to_owned()
|
||||
+ r#"plane001 = offsetPlane(XY, offset = 500)
|
||||
"#;
|
||||
|
||||
let result = cache_test(
|
||||
"add_offset_plane_preserves_artifact_commands",
|
||||
vec![
|
||||
Variation {
|
||||
code,
|
||||
other_files: vec![],
|
||||
settings: &Default::default(),
|
||||
},
|
||||
Variation {
|
||||
code: code_with_more.as_str(),
|
||||
other_files: vec![],
|
||||
settings: &Default::default(),
|
||||
},
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
let second = &result.last().unwrap().2;
|
||||
|
||||
let v = second.artifact_graph.values().collect::<Vec<_>>();
|
||||
let path_step = &v[2].code_ref().unwrap().node_path.steps[0];
|
||||
assert_eq!(*path_step, NodePathStep::ProgramBodyItem { index: 2 });
|
||||
}
|
||||
|
||||
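A note on the expected index in the assertion above: ProgramBodyItem indices count body items of the full program, so with two cached statements (sketch001 and profile001) the appended offsetPlane statement is the third item overall. An illustrative check, assuming kcl_lib's public NodePathStep as imported in this test file:

// Illustrative only; mirrors the reasoning behind the assertion above.
use kcl_lib::NodePathStep;

fn main() {
    let cached_body_items = 2; // sketch001 and profile001 come from the cache
    let appended_index = cached_body_items; // plane001 = offsetPlane(...) is the next item
    assert_eq!(
        NodePathStep::ProgramBodyItem { index: appended_index },
        NodePathStep::ProgramBodyItem { index: 2 }
    );
}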
#[tokio::test(flavor = "multi_thread")]
|
||||
|
@ -953,36 +953,6 @@ sketch001 = startSketchOn(box, face = END)
|
||||
assert_out("revolve_on_edge", &result);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_revolve_on_edge_get_edge() {
|
||||
let code = r#"box = startSketchOn(XY)
|
||||
|> startProfile(at = [0, 0])
|
||||
|> line(end = [0, 10])
|
||||
|> line(end = [10, 0])
|
||||
|> line(end = [0, -10], tag = $revolveAxis)
|
||||
|> close()
|
||||
|> extrude(length = 10)
|
||||
|
||||
sketch001 = startSketchOn(box, face = revolveAxis)
|
||||
|> startProfile(at = [5, 10])
|
||||
|> line(end = [0, -10])
|
||||
|> line(end = [2, 0])
|
||||
|> line(end = [0, 10])
|
||||
|> close()
|
||||
|> revolve(axis = revolveAxis, angle = 90)
|
||||
|
||||
"#;
|
||||
|
||||
let result = execute_and_snapshot(code, None).await;
|
||||
|
||||
result.unwrap_err();
|
||||
// This fails right now, but slightly differently; let's just say it's enough for it to fail - mike
|
||||
//assert_eq!(
|
||||
// result.err().unwrap().to_string(),
|
||||
// r#"engine: KclErrorDetails { source_ranges: [SourceRange([346, 390, 0])], message: "Modeling command failed: [ApiError { error_code: InternalEngine, message: \"Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis\" }]" }"#
|
||||
//);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_revolve_on_face_circle_edge() {
|
||||
let code = r#"box = startSketchOn(XY)
|
||||
|
Snapshot images: 2 updated, 2 added (19–48 KiB each); binary contents not shown.
@ -788,6 +788,7 @@ impl ArgData {
|
||||
Some("Axis2d | Edge") | Some("Axis3d | Edge") => Some((index, format!(r#"{label}${{{index}:X}}"#))),
|
||||
Some("Edge") => Some((index, format!(r#"{label}${{{index}:tag_or_edge_fn}}"#))),
|
||||
Some("[Edge; 1+]") => Some((index, format!(r#"{label}[${{{index}:tag_or_edge_fn}}]"#))),
|
||||
Some("Plane") => Some((index, format!(r#"{label}${{{}:XY}}"#, index))),
|
||||
|
||||
Some("string") => Some((index, format!(r#"{label}${{{}:"string"}}"#, index))),
|
||||
Some("bool") => Some((index, format!(r#"{label}${{{}:false}}"#, index))),
|
||||
|
@ -1167,6 +1167,16 @@ mod tests {
|
||||
assert_eq!(snippet, r#"clone(${0:part001})"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_autocomplete_snippet_offset_plane() {
|
||||
let data = kcl_doc::walk_prelude();
|
||||
let DocData::Fn(offset_plane_fn) = data.find_by_name("offsetPlane").unwrap() else {
|
||||
panic!();
|
||||
};
|
||||
let snippet = offset_plane_fn.to_autocomplete_snippet();
|
||||
assert_eq!(snippet, r#"offsetPlane(${0:XY}, offset = ${1:3.14})"#);
|
||||
}
|
||||
|
||||
// We want to test the snippets we compile at lsp start.
|
||||
#[test]
|
||||
fn get_all_stdlib_autocomplete_snippets() {
|
||||
|
@ -223,6 +223,26 @@ impl EngineConnection {
message: errors.iter().map(|e| e.message.clone()).collect::<Vec<_>>().join("\n"),
source_ranges: vec![source_range],
})
} else if let Ok(data) =
serde_json::from_str::<Vec<kittycad_modeling_cmds::websocket::FailureWebSocketResponse>>(&err_str)
{
if let Some(data) = data.first() {
// It could also be an array of responses.
KclError::Engine(KclErrorDetails {
message: data
.errors
.iter()
.map(|e| e.message.clone())
.collect::<Vec<_>>()
.join("\n"),
source_ranges: vec![source_range],
})
} else {
KclError::Engine(KclErrorDetails {
message: "Received empty response from engine".into(),
source_ranges: vec![source_range],
})
}
} else {
KclError::Engine(KclErrorDetails {
message: format!("Failed to wait for promise from send modeling command: {:?}", e),
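The new branch above accepts either a single failure response or an array of them before giving up. A hedged sketch of the same fallback chain with an assumed helper name; it relies only on the serde_json calls and the errors/message fields already used in the hunk:

// Sketch of the fallback: try a single response first, then an array of them.
use kittycad_modeling_cmds::websocket::FailureWebSocketResponse;

fn collect_failure_messages(err_str: &str) -> Option<String> {
    let failures: Vec<FailureWebSocketResponse> =
        serde_json::from_str::<FailureWebSocketResponse>(err_str)
            .map(|single| vec![single])
            .or_else(|_| serde_json::from_str::<Vec<FailureWebSocketResponse>>(err_str))
            .ok()?;
    // Join every error message from the first response into one readable string.
    let first = failures.first()?;
    Some(
        first
            .errors
            .iter()
            .map(|e| e.message.clone())
            .collect::<Vec<_>>()
            .join("\n"),
    )
}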
@ -764,7 +764,12 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
WebSocketResponse::Failure(fail) => {
let _request_id = fail.request_id;
Err(KclError::Engine(KclErrorDetails {
message: format!("Modeling command failed: {:?}", fail.errors),
message: fail
.errors
.iter()
.map(|e| e.message.clone())
.collect::<Vec<_>>()
.join("\n"),
source_ranges: vec![source_range],
}))
}
@ -807,7 +812,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
})
})?;
return Err(KclError::Engine(KclErrorDetails {
message: format!("Modeling command failed: {:?}", errors),
message: errors.iter().map(|e| e.message.clone()).collect::<Vec<_>>().join("\n"),
source_ranges: vec![source_range],
}));
}
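The formatting change above swaps the Debug dump of fail.errors for one message per line, which lines up with the regression test earlier in this diff that now expects the plain engine message rather than an ApiError debug dump. A minimal sketch of the new shape, where messages stands in for the per-error message strings:

// Each engine error message becomes its own line in the user-facing error.
fn readable_engine_error(messages: &[String]) -> String {
    messages.join("\n")
}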
@ -129,6 +129,7 @@ impl From<KclErrorWithOutputs> for KclError {
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct KclErrorWithOutputs {
|
||||
pub error: KclError,
|
||||
pub non_fatal: Vec<CompilationError>,
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
pub operations: Vec<Operation>,
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -141,8 +142,10 @@ pub struct KclErrorWithOutputs {
|
||||
}
|
||||
|
||||
impl KclErrorWithOutputs {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
error: KclError,
|
||||
non_fatal: Vec<CompilationError>,
|
||||
#[cfg(feature = "artifact-graph")] operations: Vec<Operation>,
|
||||
#[cfg(feature = "artifact-graph")] artifact_commands: Vec<ArtifactCommand>,
|
||||
#[cfg(feature = "artifact-graph")] artifact_graph: ArtifactGraph,
|
||||
@ -152,6 +155,7 @@ impl KclErrorWithOutputs {
|
||||
) -> Self {
|
||||
Self {
|
||||
error,
|
||||
non_fatal,
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
operations,
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -166,6 +170,7 @@ impl KclErrorWithOutputs {
|
||||
pub fn no_outputs(error: KclError) -> Self {
|
||||
Self {
|
||||
error,
|
||||
non_fatal: Default::default(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
operations: Default::default(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
|
@ -115,7 +115,7 @@ where
|
||||
seq.end()
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, PartialEq, Eq, ts_rs::TS)]
|
||||
#[derive(Debug, Clone, Default, Serialize, PartialEq, Eq, ts_rs::TS)]
|
||||
#[ts(export_to = "Artifact.ts")]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CodeRef {
|
||||
@ -396,7 +396,6 @@ pub enum Artifact {
|
||||
Cap(Cap),
|
||||
SweepEdge(SweepEdge),
|
||||
EdgeCut(EdgeCut),
|
||||
#[expect(unused)]
|
||||
EdgeCutEdge(EdgeCutEdge),
|
||||
Helix(Helix),
|
||||
}
|
||||
@ -550,8 +549,9 @@ impl Artifact {
|
||||
}
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
pub(crate) fn code_ref(&self) -> Option<&CodeRef> {
|
||||
/// The [`CodeRef`] for the artifact itself. See also
|
||||
/// [`Self::face_code_ref`].
|
||||
pub fn code_ref(&self) -> Option<&CodeRef> {
|
||||
match self {
|
||||
Artifact::CompositeSolid(a) => Some(&a.code_ref),
|
||||
Artifact::Plane(a) => Some(&a.code_ref),
|
||||
@ -570,6 +570,24 @@ impl Artifact {
|
||||
}
|
||||
}
|
||||
|
||||
/// The [`CodeRef`] referring to the face artifact that it's on, not the
/// artifact itself.
pub fn face_code_ref(&self) -> Option<&CodeRef> {
match self {
Artifact::CompositeSolid(_)
| Artifact::Plane(_)
| Artifact::Path(_)
| Artifact::Segment(_)
| Artifact::Solid2d(_)
| Artifact::StartSketchOnFace(_)
| Artifact::StartSketchOnPlane(_)
| Artifact::Sweep(_) => None,
Artifact::Wall(a) => Some(&a.face_code_ref),
Artifact::Cap(a) => Some(&a.face_code_ref),
Artifact::SweepEdge(_) | Artifact::EdgeCut(_) | Artifact::EdgeCutEdge(_) | Artifact::Helix(_) => None,
}
}
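Only walls and caps carry a face_code_ref, so a caller that wants a source location regardless of artifact kind can combine the two accessors. A hypothetical helper, not part of the diff, using only the two methods defined above:

// Hypothetical: prefer the artifact's own CodeRef, fall back to the face it sits on.
fn best_code_ref(artifact: &Artifact) -> Option<&CodeRef> {
    artifact.code_ref().or_else(|| artifact.face_code_ref())
}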
/// Merge the new artifact into self. If it can't because it's a different
|
||||
/// type, return the new artifact which should be used as a replacement.
|
||||
fn merge(&mut self, new: Artifact) -> Option<Artifact> {
|
||||
@ -704,6 +722,19 @@ impl ArtifactGraph {
|
||||
self.map.len()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.map.is_empty()
|
||||
}
|
||||
|
||||
pub fn values(&self) -> impl Iterator<Item = &Artifact> {
|
||||
self.map.values()
|
||||
}
|
||||
|
||||
/// Consume the artifact graph and return the map of artifacts.
|
||||
fn into_map(self) -> IndexMap<ArtifactId, Artifact> {
|
||||
self.map
|
||||
}
|
||||
|
||||
/// Used to make the mermaid tests deterministic.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn sort(&mut self) {
|
||||
@ -712,17 +743,30 @@ impl ArtifactGraph {
|
||||
}
|
||||
}
|
||||
|
||||
/// Build the artifact graph from the artifact commands and the responses. The
|
||||
/// initial graph is the graph cached from a previous execution. NodePaths of
|
||||
/// `exec_artifacts` are filled in from the AST.
|
||||
pub(super) fn build_artifact_graph(
|
||||
artifact_commands: &[ArtifactCommand],
|
||||
responses: &IndexMap<Uuid, WebSocketResponse>,
|
||||
ast: &Node<Program>,
|
||||
exec_artifacts: &IndexMap<ArtifactId, Artifact>,
|
||||
cached_body_items: usize,
|
||||
exec_artifacts: &mut IndexMap<ArtifactId, Artifact>,
|
||||
initial_graph: ArtifactGraph,
|
||||
) -> Result<ArtifactGraph, KclError> {
|
||||
let mut map = IndexMap::new();
|
||||
let mut map = initial_graph.into_map();
|
||||
|
||||
let mut path_to_plane_id_map = FnvHashMap::default();
|
||||
let mut current_plane_id = None;
|
||||
|
||||
// Fill in NodePaths for artifacts that were added directly to the map
|
||||
// during execution.
|
||||
for exec_artifact in exec_artifacts.values_mut() {
|
||||
// Note: We only have access to the new AST. So if these artifacts
|
||||
// somehow came from cached AST, this won't fill in anything.
|
||||
fill_in_node_paths(exec_artifact, ast, cached_body_items);
|
||||
}
|
||||
|
||||
for artifact_command in artifact_commands {
|
||||
if let ModelingCmd::EnableSketchMode(EnableSketchMode { entity_id, .. }) = artifact_command.command {
|
||||
current_plane_id = Some(entity_id);
|
||||
@ -747,6 +791,7 @@ pub(super) fn build_artifact_graph(
|
||||
&flattened_responses,
|
||||
&path_to_plane_id_map,
|
||||
ast,
|
||||
cached_body_items,
|
||||
exec_artifacts,
|
||||
)?;
|
||||
for artifact in artifact_updates {
|
||||
@ -762,6 +807,26 @@ pub(super) fn build_artifact_graph(
|
||||
Ok(ArtifactGraph { map })
|
||||
}
|
||||
|
||||
/// These may have been created with placeholder `CodeRef`s because we didn't
/// have the entire AST available. Now we fill them in.
fn fill_in_node_paths(artifact: &mut Artifact, program: &Node<Program>, cached_body_items: usize) {
match artifact {
Artifact::StartSketchOnFace(face) => {
if face.code_ref.node_path.is_empty() {
face.code_ref.node_path =
NodePath::from_range(program, cached_body_items, face.code_ref.range).unwrap_or_default();
}
}
Artifact::StartSketchOnPlane(plane) => {
if plane.code_ref.node_path.is_empty() {
plane.code_ref.node_path =
NodePath::from_range(program, cached_body_items, plane.code_ref.range).unwrap_or_default();
}
}
_ => {}
}
}
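The same placeholder-filling pattern, written as a hedged standalone sketch for any CodeRef: type and field names are taken from the surrounding hunks, but the helper itself is illustrative and not part of the diff.

// Recompute an empty NodePath from the new AST, offset by the cached body items.
fn fill_placeholder(code_ref: &mut CodeRef, program: &Node<Program>, cached_body_items: usize) {
    if code_ref.node_path.is_empty() {
        code_ref.node_path =
            NodePath::from_range(program, cached_body_items, code_ref.range).unwrap_or_default();
    }
}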
/// Flatten the responses into a map of command IDs to modeling command
|
||||
/// responses. The raw responses from the engine contain batches.
|
||||
fn flatten_modeling_command_responses(
|
||||
@ -844,28 +909,28 @@ fn artifacts_to_update(
|
||||
responses: &FnvHashMap<Uuid, OkModelingCmdResponse>,
|
||||
path_to_plane_id_map: &FnvHashMap<Uuid, Uuid>,
|
||||
ast: &Node<Program>,
|
||||
cached_body_items: usize,
|
||||
exec_artifacts: &IndexMap<ArtifactId, Artifact>,
|
||||
) -> Result<Vec<Artifact>, KclError> {
|
||||
let uuid = artifact_command.cmd_id;
|
||||
let Some(response) = responses.get(&uuid) else {
|
||||
// Response not found or not successful.
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
// TODO: Build path-to-node from artifact_command source range. Right now,
|
||||
// we're serializing an empty array, and the TS wrapper fills it in with the
|
||||
// correct value based on NodePath.
|
||||
let path_to_node = Vec::new();
|
||||
let range = artifact_command.range;
|
||||
let node_path = NodePath::from_range(ast, range).unwrap_or_default();
|
||||
let node_path = NodePath::from_range(ast, cached_body_items, range).unwrap_or_default();
|
||||
let code_ref = CodeRef {
|
||||
range,
|
||||
node_path,
|
||||
path_to_node,
|
||||
};
|
||||
|
||||
let uuid = artifact_command.cmd_id;
|
||||
let id = ArtifactId::new(uuid);
|
||||
|
||||
let Some(response) = responses.get(&uuid) else {
|
||||
// Response not found or not successful.
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
let cmd = &artifact_command.command;
|
||||
|
||||
match cmd {
|
||||
@ -1100,16 +1165,19 @@ fn artifacts_to_update(
|
||||
let extra_artifact = exec_artifacts.values().find(|a| {
|
||||
if let Artifact::StartSketchOnFace(s) = a {
|
||||
s.face_id == face_id
|
||||
} else if let Artifact::StartSketchOnPlane(s) = a {
|
||||
s.plane_id == face_id
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
let sketch_on_face_source_range = extra_artifact
|
||||
let sketch_on_face_code_ref = extra_artifact
|
||||
.and_then(|a| match a {
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.range),
|
||||
// TODO: If we didn't find it, it's probably a bug.
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.clone()),
|
||||
Artifact::StartSketchOnPlane(s) => Some(s.code_ref.clone()),
|
||||
_ => None,
|
||||
})
|
||||
// TODO: If we didn't find it, it's probably a bug.
|
||||
.unwrap_or_default();
|
||||
|
||||
return_arr.push(Artifact::Wall(Wall {
|
||||
@ -1118,11 +1186,7 @@ fn artifacts_to_update(
|
||||
edge_cut_edge_ids: Vec::new(),
|
||||
sweep_id: path_sweep_id,
|
||||
path_ids: Vec::new(),
|
||||
face_code_ref: CodeRef {
|
||||
range: sketch_on_face_source_range,
|
||||
node_path: NodePath::from_range(ast, sketch_on_face_source_range).unwrap_or_default(),
|
||||
path_to_node: Vec::new(),
|
||||
},
|
||||
face_code_ref: sketch_on_face_code_ref,
|
||||
cmd_id: artifact_command.cmd_id,
|
||||
}));
|
||||
let mut new_seg = seg.clone();
|
||||
@ -1155,15 +1219,19 @@ fn artifacts_to_update(
|
||||
let extra_artifact = exec_artifacts.values().find(|a| {
|
||||
if let Artifact::StartSketchOnFace(s) = a {
|
||||
s.face_id == face_id
|
||||
} else if let Artifact::StartSketchOnPlane(s) = a {
|
||||
s.plane_id == face_id
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
let sketch_on_face_source_range = extra_artifact
|
||||
let sketch_on_face_code_ref = extra_artifact
|
||||
.and_then(|a| match a {
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.range),
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.clone()),
|
||||
Artifact::StartSketchOnPlane(s) => Some(s.code_ref.clone()),
|
||||
_ => None,
|
||||
})
|
||||
// TODO: If we didn't find it, it's probably a bug.
|
||||
.unwrap_or_default();
|
||||
return_arr.push(Artifact::Cap(Cap {
|
||||
id: face_id,
|
||||
@ -1171,11 +1239,7 @@ fn artifacts_to_update(
|
||||
edge_cut_edge_ids: Vec::new(),
|
||||
sweep_id: path_sweep_id,
|
||||
path_ids: Vec::new(),
|
||||
face_code_ref: CodeRef {
|
||||
range: sketch_on_face_source_range,
|
||||
node_path: NodePath::from_range(ast, sketch_on_face_source_range).unwrap_or_default(),
|
||||
path_to_node: Vec::new(),
|
||||
},
|
||||
face_code_ref: sketch_on_face_code_ref,
|
||||
cmd_id: artifact_command.cmd_id,
|
||||
}));
|
||||
let Some(Artifact::Sweep(sweep)) = artifacts.get(&path_sweep_id) else {
|
||||
|
@ -298,13 +298,19 @@ impl ArtifactGraph {
|
||||
let range = code_ref.range;
|
||||
[range.start(), range.end(), range.module_id().as_usize()]
|
||||
}
|
||||
fn node_path_display<W: Write>(output: &mut W, prefix: &str, code_ref: &CodeRef) -> std::fmt::Result {
|
||||
fn node_path_display<W: Write>(
|
||||
output: &mut W,
|
||||
prefix: &str,
|
||||
label: Option<&str>,
|
||||
code_ref: &CodeRef,
|
||||
) -> std::fmt::Result {
|
||||
// %% is a mermaid comment. Prefix is increased one level since it's
|
||||
// a child of the line above it.
|
||||
let label = label.unwrap_or("");
|
||||
if code_ref.node_path.is_empty() {
|
||||
return writeln!(output, "{prefix} %% Missing NodePath");
|
||||
return writeln!(output, "{prefix} %% {label}Missing NodePath");
|
||||
}
|
||||
writeln!(output, "{prefix} %% {:?}", code_ref.node_path.steps)
|
||||
writeln!(output, "{prefix} %% {label}{:?}", code_ref.node_path.steps)
|
||||
}
|
||||
|
||||
match artifact {
|
||||
@ -315,7 +321,7 @@ impl ArtifactGraph {
|
||||
composite_solid.sub_type,
|
||||
code_ref_display(&composite_solid.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &composite_solid.code_ref)?;
|
||||
node_path_display(output, prefix, None, &composite_solid.code_ref)?;
|
||||
}
|
||||
Artifact::Plane(plane) => {
|
||||
writeln!(
|
||||
@ -323,7 +329,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Plane<br>{:?}\"]",
|
||||
code_ref_display(&plane.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &plane.code_ref)?;
|
||||
node_path_display(output, prefix, None, &plane.code_ref)?;
|
||||
}
|
||||
Artifact::Path(path) => {
|
||||
writeln!(
|
||||
@ -331,7 +337,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Path<br>{:?}\"]",
|
||||
code_ref_display(&path.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &path.code_ref)?;
|
||||
node_path_display(output, prefix, None, &path.code_ref)?;
|
||||
}
|
||||
Artifact::Segment(segment) => {
|
||||
writeln!(
|
||||
@ -339,7 +345,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Segment<br>{:?}\"]",
|
||||
code_ref_display(&segment.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &segment.code_ref)?;
|
||||
node_path_display(output, prefix, None, &segment.code_ref)?;
|
||||
}
|
||||
Artifact::Solid2d(_solid2d) => {
|
||||
writeln!(output, "{prefix}{}[Solid2d]", id)?;
|
||||
@ -350,7 +356,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"StartSketchOnFace<br>{:?}\"]",
|
||||
code_ref_display(code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, code_ref)?;
|
||||
node_path_display(output, prefix, None, code_ref)?;
|
||||
}
|
||||
Artifact::StartSketchOnPlane(StartSketchOnPlane { code_ref, .. }) => {
|
||||
writeln!(
|
||||
@ -358,7 +364,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"StartSketchOnPlane<br>{:?}\"]",
|
||||
code_ref_display(code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, code_ref)?;
|
||||
node_path_display(output, prefix, None, code_ref)?;
|
||||
}
|
||||
Artifact::Sweep(sweep) => {
|
||||
writeln!(
|
||||
@ -367,13 +373,15 @@ impl ArtifactGraph {
|
||||
sweep.sub_type,
|
||||
code_ref_display(&sweep.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &sweep.code_ref)?;
|
||||
node_path_display(output, prefix, None, &sweep.code_ref)?;
|
||||
}
|
||||
Artifact::Wall(_wall) => {
|
||||
Artifact::Wall(wall) => {
|
||||
writeln!(output, "{prefix}{id}[Wall]")?;
|
||||
node_path_display(output, prefix, Some("face_code_ref="), &wall.face_code_ref)?;
|
||||
}
|
||||
Artifact::Cap(cap) => {
|
||||
writeln!(output, "{prefix}{id}[\"Cap {:?}\"]", cap.sub_type)?;
|
||||
node_path_display(output, prefix, Some("face_code_ref="), &cap.face_code_ref)?;
|
||||
}
|
||||
Artifact::SweepEdge(sweep_edge) => {
|
||||
writeln!(output, "{prefix}{id}[\"SweepEdge {:?}\"]", sweep_edge.sub_type)?;
|
||||
@ -385,7 +393,7 @@ impl ArtifactGraph {
|
||||
edge_cut.sub_type,
|
||||
code_ref_display(&edge_cut.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &edge_cut.code_ref)?;
|
||||
node_path_display(output, prefix, None, &edge_cut.code_ref)?;
|
||||
}
|
||||
Artifact::EdgeCutEdge(_edge_cut_edge) => {
|
||||
writeln!(output, "{prefix}{id}[EdgeCutEdge]")?;
|
||||
@ -396,7 +404,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Helix<br>{:?}\"]",
|
||||
code_ref_display(&helix.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &helix.code_ref)?;
|
||||
node_path_display(output, prefix, None, &helix.code_ref)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@@ -79,6 +79,9 @@ pub(super) enum CacheResult {
reapply_settings: bool,
/// The program that needs to be executed.
program: Node<Program>,
/// The number of body items that were cached and omitted from the
/// program that needs to be executed. Used to compute [`crate::NodePath`].
cached_body_items: usize,
},
/// Check only the imports, and not the main program.
/// Before sending this we already checked the main program and it is the same.
@ -191,6 +194,7 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf
|
||||
clear_scene: true,
|
||||
reapply_settings: true,
|
||||
program: new.ast.clone(),
|
||||
cached_body_items: 0,
|
||||
};
|
||||
}
|
||||
|
||||
@ -219,6 +223,7 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
|
||||
clear_scene: true,
|
||||
reapply_settings,
|
||||
program: new_ast,
|
||||
cached_body_items: 0,
|
||||
};
|
||||
}
|
||||
|
||||
@ -239,6 +244,7 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
|
||||
clear_scene: true,
|
||||
reapply_settings,
|
||||
program: new_ast,
|
||||
cached_body_items: 0,
|
||||
}
|
||||
}
|
||||
std::cmp::Ordering::Greater => {
|
||||
@ -255,6 +261,7 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
|
||||
clear_scene: false,
|
||||
reapply_settings,
|
||||
program: new_ast,
|
||||
cached_body_items: old_ast.body.len(),
|
||||
}
|
||||
}
|
||||
std::cmp::Ordering::Equal => {
|
||||
@ -592,7 +599,8 @@ startSketchOn(XY)
|
||||
CacheResult::ReExecute {
|
||||
clear_scene: true,
|
||||
reapply_settings: true,
|
||||
program: new_program.ast
|
||||
program: new_program.ast,
|
||||
cached_body_items: 0,
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -630,7 +638,8 @@ startSketchOn(XY)
|
||||
CacheResult::ReExecute {
|
||||
clear_scene: true,
|
||||
reapply_settings: true,
|
||||
program: new_program.ast
|
||||
program: new_program.ast,
|
||||
cached_body_items: 0,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
@@ -736,21 +736,35 @@ fn apply_ascription(
let ty = RuntimeType::from_parsed(ty.inner.clone(), exec_state, value.into())
.map_err(|e| KclError::Semantic(e.into()))?;

if let KclValue::Number { value, meta, .. } = value {
let mut value = value.clone();

// If the number has unknown units but the user is explicitly specifying them, treat the value as having had it's units erased,
// rather than forcing the user to explicitly erase them.
KclValue::Number {
if let KclValue::Number { value: n, meta, .. } = &value {
if let RuntimeType::Primitive(PrimitiveType::Number(num)) = &ty {
if num.is_fully_specified() {
value = KclValue::Number {
ty: NumericType::Any,
value: *value,
value: *n,
meta: meta.clone(),
};
}
.coerce(&ty, exec_state)
}
}

value.coerce(&ty, exec_state).map_err(|_| {
let suggestion = if ty == RuntimeType::length() {
", you might try coercing to a fully specified numeric type such as `number(mm)`"
} else if ty == RuntimeType::angle() {
", you might try coercing to a fully specified numeric type such as `number(deg)`"
} else {
value.coerce(&ty, exec_state)
}
.map_err(|_| {
""
};
KclError::Semantic(KclErrorDetails {
message: format!("could not coerce {} value to type {}", value.human_friendly_type(), ty),
message: format!(
"could not coerce {} value to type {ty}{suggestion}",
value.human_friendly_type()
),
source_ranges: vec![source_range],
})
})
@ -1311,10 +1325,15 @@ impl Node<CallExpressionKw> {
|
||||
Some(l) => {
|
||||
fn_args.insert(l.name.clone(), arg);
|
||||
}
|
||||
None => errors.push(arg),
|
||||
None => {
|
||||
if let Some(id) = arg_expr.arg.ident_name() {
|
||||
fn_args.insert(id.to_owned(), arg);
|
||||
} else {
|
||||
errors.push(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let fn_args = fn_args; // remove mutability
|
||||
|
||||
// Evaluate the unlabeled first param, if any exists.
|
||||
let unlabeled = if let Some(ref arg_expr) = self.unlabeled {
|
||||
@ -1323,12 +1342,15 @@ impl Node<CallExpressionKw> {
|
||||
let value = ctx
|
||||
.execute_expr(arg_expr, exec_state, &metadata, &[], StatementKind::Expression)
|
||||
.await?;
|
||||
Some(Arg::new(value, source_range))
|
||||
|
||||
let label = arg_expr.ident_name().map(str::to_owned);
|
||||
|
||||
Some((label, Arg::new(value, source_range)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let args = Args::new_kw(
|
||||
let mut args = Args::new_kw(
|
||||
KwArgs {
|
||||
unlabeled,
|
||||
labeled: fn_args,
|
||||
@ -1347,6 +1369,20 @@ impl Node<CallExpressionKw> {
|
||||
));
|
||||
}
|
||||
|
||||
let formals = func.args(false);
|
||||
|
||||
// If it's possible the input arg was meant to be labelled and we probably don't want to use
|
||||
// it as the input arg, then treat it as labelled.
|
||||
if let Some((Some(label), _)) = &args.kw_args.unlabeled {
|
||||
if (formals.iter().all(|a| a.label_required) || exec_state.pipe_value().is_some())
|
||||
&& formals.iter().any(|a| &a.name == label && a.label_required)
|
||||
&& !args.kw_args.labeled.contains_key(label)
|
||||
{
|
||||
let (label, arg) = args.kw_args.unlabeled.take().unwrap();
|
||||
args.kw_args.labeled.insert(label.unwrap(), arg);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
let op = if func.feature_tree_operation() {
|
||||
let op_labeled_args = args
|
||||
@ -1368,7 +1404,6 @@ impl Node<CallExpressionKw> {
|
||||
None
|
||||
};
|
||||
|
||||
let formals = func.args(false);
|
||||
for (label, arg) in &args.kw_args.labeled {
|
||||
match formals.iter().find(|p| &p.name == label) {
|
||||
Some(p) => {
|
||||
@ -1432,7 +1467,6 @@ impl Node<CallExpressionKw> {
|
||||
.await
|
||||
.map_err(|e| {
|
||||
// Add the call expression to the source ranges.
|
||||
// TODO currently ignored by the frontend
|
||||
e.add_source_ranges(vec![callsite])
|
||||
})?;
|
||||
|
||||
@ -1865,6 +1899,21 @@ fn type_check_params_kw(
|
||||
args: &mut KwArgs,
|
||||
exec_state: &mut ExecState,
|
||||
) -> Result<(), KclError> {
|
||||
// If it's possible the input arg was meant to be labelled and we probably don't want to use
|
||||
// it as the input arg, then treat it as labelled.
|
||||
if let Some((Some(label), _)) = &args.unlabeled {
|
||||
if (function_expression.params.iter().all(|p| p.labeled) || exec_state.pipe_value().is_some())
|
||||
&& function_expression
|
||||
.params
|
||||
.iter()
|
||||
.any(|p| &p.identifier.name == label && p.labeled)
|
||||
&& !args.labeled.contains_key(label)
|
||||
{
|
||||
let (label, arg) = args.unlabeled.take().unwrap();
|
||||
args.labeled.insert(label.unwrap(), arg);
|
||||
}
|
||||
}
|
||||
|
||||
for (label, arg) in &mut args.labeled {
|
||||
match function_expression.params.iter().find(|p| &p.identifier.name == label) {
|
||||
Some(p) => {
|
||||
@ -1959,10 +2008,11 @@ fn type_check_params_kw(
|
||||
if let Some(arg) = &mut args.unlabeled {
|
||||
if let Some(p) = function_expression.params.iter().find(|p| !p.labeled) {
|
||||
if let Some(ty) = &p.type_ {
|
||||
arg.value = arg
|
||||
arg.1.value = arg
|
||||
.1
|
||||
.value
|
||||
.coerce(
|
||||
&RuntimeType::from_parsed(ty.inner.clone(), exec_state, arg.source_range)
|
||||
&RuntimeType::from_parsed(ty.inner.clone(), exec_state, arg.1.source_range)
|
||||
.map_err(|e| KclError::Semantic(e.into()))?,
|
||||
exec_state,
|
||||
)
|
||||
@ -1974,9 +2024,9 @@ fn type_check_params_kw(
|
||||
.map(|n| format!("`{}`", n))
|
||||
.unwrap_or_else(|| "this function".to_owned()),
|
||||
ty.inner,
|
||||
arg.value.human_friendly_type()
|
||||
arg.1.value.human_friendly_type()
|
||||
),
|
||||
source_ranges: vec![arg.source_range],
|
||||
source_ranges: vec![arg.1.source_range],
|
||||
})
|
||||
})?;
|
||||
}
|
||||
@ -2139,10 +2189,11 @@ impl FunctionSource {
|
||||
if let Some(arg) = &mut args.kw_args.unlabeled {
|
||||
if let Some(p) = ast.params.iter().find(|p| !p.labeled) {
|
||||
if let Some(ty) = &p.type_ {
|
||||
arg.value = arg
|
||||
arg.1.value = arg
|
||||
.1
|
||||
.value
|
||||
.coerce(
|
||||
&RuntimeType::from_parsed(ty.inner.clone(), exec_state, arg.source_range)
|
||||
&RuntimeType::from_parsed(ty.inner.clone(), exec_state, arg.1.source_range)
|
||||
.map_err(|e| KclError::Semantic(e.into()))?,
|
||||
exec_state,
|
||||
)
|
||||
@ -2152,7 +2203,7 @@ impl FunctionSource {
|
||||
"The input argument of {} requires a value with type `{}`, but found {}",
|
||||
props.name,
|
||||
ty.inner,
|
||||
arg.value.human_friendly_type(),
|
||||
arg.1.value.human_friendly_type(),
|
||||
),
|
||||
source_ranges: vec![callsite],
|
||||
})
|
||||
@ -2224,7 +2275,7 @@ impl FunctionSource {
|
||||
.kw_args
|
||||
.unlabeled
|
||||
.as_ref()
|
||||
.map(|arg| OpArg::new(OpKclValue::from(&arg.value), arg.source_range)),
|
||||
.map(|arg| OpArg::new(OpKclValue::from(&arg.1.value), arg.1.source_range)),
|
||||
labeled_args: op_labeled_args,
|
||||
},
|
||||
source_range: callsite,
|
||||
@ -2665,13 +2716,12 @@ a = foo()
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_sensible_error_when_missing_equals_in_kwarg() {
|
||||
for (i, call) in ["f(x=1,y)", "f(x=1,3,z)", "f(x=1,y,z=1)", "f(x=1, 3 + 4, z=1)"]
|
||||
for (i, call) in ["f(x=1,3,0)", "f(x=1,3,z)", "f(x=1,0,z=1)", "f(x=1, 3 + 4, z)"]
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
{
|
||||
let program = format!(
|
||||
"fn foo() {{ return 0 }}
|
||||
y = 42
|
||||
z = 0
|
||||
fn f(x, y, z) {{ return 0 }}
|
||||
{call}"
|
||||
@ -2691,9 +2741,10 @@ fn f(x, y, z) {{ return 0 }}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn default_param_for_unlabeled() {
|
||||
// Tests that the input param for myExtrude is taken from the pipeline value.
|
||||
// Tests that the input param for myExtrude is taken from the pipeline value and same-name
|
||||
// keyword args.
|
||||
let ast = r#"fn myExtrude(@sk, length) {
|
||||
return extrude(sk, length = length)
|
||||
return extrude(sk, length)
|
||||
}
|
||||
sketch001 = startSketchOn(XY)
|
||||
|> circle(center = [0, 0], radius = 93.75)
|
||||
@ -2703,6 +2754,18 @@ sketch001 = startSketchOn(XY)
|
||||
parse_execute(ast).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn dont_use_unlabelled_as_input() {
|
||||
// `length` should be used as the `length` argument to extrude, not the unlabelled input
|
||||
let ast = r#"length = 10
|
||||
startSketchOn(XY)
|
||||
|> circle(center = [0, 0], radius = 93.75)
|
||||
|> extrude(length)
|
||||
"#;
|
||||
|
||||
parse_execute(ast).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn ascription_in_binop() {
|
||||
let ast = r#"foo = tan(0): number(rad) - 4deg"#;
|
||||
@ -2717,4 +2780,29 @@ sketch001 = startSketchOn(XY)
|
||||
// Make sure we get a useful error message and not an engine error.
|
||||
assert!(e.message().contains("sqrt"), "Error message: '{}'", e.message());
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_unknown_to_length() {
|
||||
let ast = r#"x = 2mm * 2mm
|
||||
y = x: number(Length)"#;
|
||||
let e = parse_execute(ast).await.unwrap_err();
|
||||
assert!(
|
||||
e.message().contains("could not coerce"),
|
||||
"Error message: '{}'",
|
||||
e.message()
|
||||
);
|
||||
|
||||
let ast = r#"x = 2mm
|
||||
y = x: number(Length)"#;
|
||||
let result = parse_execute(ast).await.unwrap();
|
||||
let mem = result.exec_state.stack();
|
||||
let num = mem
|
||||
.memory
|
||||
.get_from("y", result.mem_env, SourceRange::default(), 0)
|
||||
.unwrap()
|
||||
.as_ty_f64()
|
||||
.unwrap();
|
||||
assert_eq!(num.n, 2.0);
|
||||
assert_eq!(num.ty, NumericType::mm());
|
||||
}
|
||||
}
|
||||
|
@ -426,14 +426,14 @@ impl ExecutorContext {
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn new_mock() -> Self {
|
||||
pub async fn new_mock(settings: Option<ExecutorSettings>) -> Self {
|
||||
ExecutorContext {
|
||||
engine: Arc::new(Box::new(
|
||||
crate::engine::conn_mock::EngineConnection::new().await.unwrap(),
|
||||
)),
|
||||
fs: Arc::new(FileManager::new()),
|
||||
stdlib: Arc::new(StdLib::new()),
|
||||
settings: Default::default(),
|
||||
settings: settings.unwrap_or_default(),
|
||||
context_type: ContextType::Mock,
|
||||
}
|
||||
}
|
||||
@ -571,7 +571,7 @@ impl ExecutorContext {
|
||||
// part of the scene).
|
||||
exec_state.mut_stack().push_new_env_for_scope();
|
||||
|
||||
let result = self.inner_run(&program, &mut exec_state, true).await?;
|
||||
let result = self.inner_run(&program, 0, &mut exec_state, true).await?;
|
||||
|
||||
// Restore any temporary variables, then save any newly created variables back to
|
||||
// memory in case another run wants to use them. Note this is just saved to the preserved
|
||||
@ -590,12 +590,13 @@ impl ExecutorContext {
|
||||
pub async fn run_with_caching(&self, program: crate::Program) -> Result<ExecOutcome, KclErrorWithOutputs> {
|
||||
assert!(!self.is_mock());
|
||||
|
||||
let (program, mut exec_state, preserve_mem, imports_info) = if let Some(OldAstState {
|
||||
let (program, mut exec_state, preserve_mem, cached_body_items, imports_info) = if let Some(OldAstState {
|
||||
ast: old_ast,
|
||||
exec_state: mut old_state,
|
||||
settings: old_settings,
|
||||
result_env,
|
||||
}) = cache::read_old_ast().await
|
||||
}) =
|
||||
cache::read_old_ast().await
|
||||
{
|
||||
let old = CacheInformation {
|
||||
ast: &old_ast,
|
||||
@ -607,11 +608,13 @@ impl ExecutorContext {
|
||||
};
|
||||
|
||||
// Get the program that actually changed from the old and new information.
|
||||
let (clear_scene, program, import_check_info) = match cache::get_changed_program(old, new).await {
|
||||
let (clear_scene, program, body_items, import_check_info) = match cache::get_changed_program(old, new).await
|
||||
{
|
||||
CacheResult::ReExecute {
|
||||
clear_scene,
|
||||
reapply_settings,
|
||||
program: changed_program,
|
||||
cached_body_items,
|
||||
} => {
|
||||
if reapply_settings
|
||||
&& self
|
||||
@ -620,7 +623,7 @@ impl ExecutorContext {
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
(true, program, None)
|
||||
(true, program, cached_body_items, None)
|
||||
} else {
|
||||
(
|
||||
clear_scene,
|
||||
@ -628,6 +631,7 @@ impl ExecutorContext {
|
||||
ast: changed_program,
|
||||
original_file_contents: program.original_file_contents,
|
||||
},
|
||||
cached_body_items,
|
||||
None,
|
||||
)
|
||||
}
|
||||
@ -643,7 +647,7 @@ impl ExecutorContext {
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
(true, program, None)
|
||||
(true, program, old_ast.body.len(), None)
|
||||
} else {
|
||||
// We need to check our imports to see if they changed.
|
||||
let mut new_exec_state = ExecState::new(self);
|
||||
@ -676,6 +680,7 @@ impl ExecutorContext {
|
||||
ast: changed_program,
|
||||
original_file_contents: program.original_file_contents,
|
||||
},
|
||||
old_ast.body.len(),
|
||||
// We only care about this if we are clearing the scene.
|
||||
if clear_scene {
|
||||
Some((new_universe, new_universe_map, new_exec_state))
|
||||
@ -704,7 +709,7 @@ impl ExecutorContext {
|
||||
let outcome = old_state.to_exec_outcome(result_env).await;
|
||||
return Ok(outcome);
|
||||
}
|
||||
(true, program, None)
|
||||
(true, program, old_ast.body.len(), None)
|
||||
}
|
||||
CacheResult::NoAction(false) => {
|
||||
let outcome = old_state.to_exec_outcome(result_env).await;
|
||||
@ -736,17 +741,17 @@ impl ExecutorContext {
|
||||
(old_state, true, None)
|
||||
};
|
||||
|
||||
(program, exec_state, preserve_mem, universe_info)
|
||||
(program, exec_state, preserve_mem, body_items, universe_info)
|
||||
} else {
|
||||
let mut exec_state = ExecState::new(self);
|
||||
self.send_clear_scene(&mut exec_state, Default::default())
|
||||
.await
|
||||
.map_err(KclErrorWithOutputs::no_outputs)?;
|
||||
(program, exec_state, false, None)
|
||||
(program, exec_state, false, 0, None)
|
||||
};
|
||||
|
||||
let result = self
|
||||
.run_concurrent(&program, &mut exec_state, imports_info, preserve_mem)
|
||||
.run_concurrent(&program, cached_body_items, &mut exec_state, imports_info, preserve_mem)
|
||||
.await;
|
||||
|
||||
if result.is_err() {
|
||||
@ -780,7 +785,7 @@ impl ExecutorContext {
|
||||
program: &crate::Program,
|
||||
exec_state: &mut ExecState,
|
||||
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
|
||||
self.run_concurrent(program, exec_state, None, false).await
|
||||
self.run_concurrent(program, 0, exec_state, None, false).await
|
||||
}
|
||||
|
||||
/// Perform the execution of a program using a concurrent
|
||||
@ -793,6 +798,7 @@ impl ExecutorContext {
|
||||
pub async fn run_concurrent(
|
||||
&self,
|
||||
program: &crate::Program,
|
||||
cached_body_items: usize,
|
||||
exec_state: &mut ExecState,
|
||||
universe_info: Option<(Universe, UniverseMap)>,
|
||||
preserve_mem: bool,
|
||||
@ -823,6 +829,7 @@ impl ExecutorContext {
|
||||
|
||||
KclErrorWithOutputs::new(
|
||||
err,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -999,6 +1006,7 @@ impl ExecutorContext {
|
||||
|
||||
return Err(KclErrorWithOutputs::new(
|
||||
e,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -1014,7 +1022,8 @@ impl ExecutorContext {
|
||||
}
|
||||
}
|
||||
|
||||
self.inner_run(program, exec_state, preserve_mem).await
|
||||
self.inner_run(program, cached_body_items, exec_state, preserve_mem)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Get the universe & universe map of the program.
|
||||
@ -1048,6 +1057,7 @@ impl ExecutorContext {
|
||||
|
||||
KclErrorWithOutputs::new(
|
||||
err,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -1068,6 +1078,7 @@ impl ExecutorContext {
|
||||
async fn inner_run(
|
||||
&self,
|
||||
program: &crate::Program,
|
||||
cached_body_items: usize,
|
||||
exec_state: &mut ExecState,
|
||||
preserve_mem: bool,
|
||||
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
|
||||
@ -1081,7 +1092,7 @@ impl ExecutorContext {
|
||||
|
||||
let default_planes = self.engine.get_default_planes().read().await.clone();
|
||||
let result = self
|
||||
.execute_and_build_graph(&program.ast, exec_state, preserve_mem)
|
||||
.execute_and_build_graph(&program.ast, cached_body_items, exec_state, preserve_mem)
|
||||
.await;
|
||||
|
||||
crate::log::log(format!(
|
||||
@ -1100,6 +1111,7 @@ impl ExecutorContext {
|
||||
|
||||
KclErrorWithOutputs::new(
|
||||
e,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -1127,6 +1139,7 @@ impl ExecutorContext {
|
||||
async fn execute_and_build_graph(
|
||||
&self,
|
||||
program: NodeRef<'_, crate::parsing::ast::types::Program>,
|
||||
#[cfg_attr(not(feature = "artifact-graph"), expect(unused))] cached_body_items: usize,
|
||||
exec_state: &mut ExecState,
|
||||
preserve_mem: bool,
|
||||
) -> Result<EnvironmentRef, KclError> {
|
||||
@ -1155,23 +1168,25 @@ impl ExecutorContext {
|
||||
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
{
|
||||
// Move the artifact commands and responses to simplify cache management
|
||||
// and error creation.
|
||||
exec_state
|
||||
.global
|
||||
.artifact_commands
|
||||
.extend(self.engine.take_artifact_commands().await);
|
||||
exec_state
|
||||
.global
|
||||
.artifact_responses
|
||||
.extend(self.engine.take_responses().await);
|
||||
let new_commands = self.engine.take_artifact_commands().await;
|
||||
let new_responses = self.engine.take_responses().await;
|
||||
let initial_graph = exec_state.global.artifact_graph.clone();
|
||||
|
||||
// Build the artifact graph.
|
||||
match build_artifact_graph(
|
||||
&exec_state.global.artifact_commands,
|
||||
&exec_state.global.artifact_responses,
|
||||
let graph_result = build_artifact_graph(
|
||||
&new_commands,
|
||||
&new_responses,
|
||||
program,
|
||||
&exec_state.global.artifacts,
|
||||
) {
|
||||
cached_body_items,
|
||||
&mut exec_state.global.artifacts,
|
||||
initial_graph,
|
||||
);
|
||||
// Move the artifact commands and responses into ExecState to
|
||||
// simplify cache management and error creation.
|
||||
exec_state.global.artifact_commands.extend(new_commands);
|
||||
exec_state.global.artifact_responses.extend(new_responses);
|
||||
|
||||
match graph_result {
|
||||
Ok(artifact_graph) => {
|
||||
exec_state.global.artifact_graph = artifact_graph;
|
||||
exec_result.map(|(_, env_ref, _)| env_ref)
|
||||
@ -2232,7 +2247,7 @@ w = f() + f()
|
||||
let result = ctx.run_with_caching(program).await.unwrap();
|
||||
assert_eq!(result.variables.get("x").unwrap().as_f64().unwrap(), 2.0);
|
||||
|
||||
let ctx2 = ExecutorContext::new_mock().await;
|
||||
let ctx2 = ExecutorContext::new_mock(None).await;
|
||||
let program2 = crate::Program::parse_no_errs("z = x + 1").unwrap();
|
||||
let result = ctx2.run_mock(program2, true).await.unwrap();
|
||||
assert_eq!(result.variables.get("z").unwrap().as_f64().unwrap(), 3.0);
|
||||
|
@@ -664,6 +664,17 @@ impl NumericType {
)
}

pub fn is_fully_specified(&self) -> bool {
!matches!(
self,
NumericType::Unknown
| NumericType::Known(UnitType::Angle(UnitAngle::Unknown))
| NumericType::Known(UnitType::Length(UnitLen::Unknown))
| NumericType::Any
| NumericType::Default { .. }
)
}

fn example_ty(&self) -> Option<String> {
match self {
Self::Known(t) if !self.is_unknown() => Some(t.to_string()),
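For orientation, a minimal sketch of what the new predicate is expected to return. The `NumericType::mm()` constructor is taken from the coercion tests elsewhere in this changeset; everything else is read from the hunk above, and this is not a complete list of variants.

    // Sketch only, not part of the diff.
    assert!(NumericType::mm().is_fully_specified());       // concrete unit
    assert!(!NumericType::Unknown.is_fully_specified());    // unresolved units
    assert!(!NumericType::Any.is_fully_specified());        // unit-erased
    // Default units (a bare `2` using the per-file default) are also
    // treated as not fully specified, per the `Default { .. }` arm above.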
@ -1266,7 +1277,15 @@ impl KclValue {
|
||||
.satisfied(values.len(), allow_shrink)
|
||||
.ok_or(CoercionError::from(self))?;
|
||||
|
||||
assert!(len <= values.len());
|
||||
if len > values.len() {
|
||||
let message = format!(
|
||||
"Internal: Expected coerced array length {len} to be less than or equal to original length {}",
|
||||
values.len()
|
||||
);
|
||||
exec_state.err(CompilationError::err(self.into(), message.clone()));
|
||||
#[cfg(debug_assertions)]
|
||||
panic!("{message}");
|
||||
}
|
||||
values.truncate(len);
|
||||
|
||||
Ok(KclValue::HomArray {
|
||||
@ -1460,7 +1479,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_idempotent() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
let values = values(&mut exec_state);
|
||||
for v in &values {
|
||||
// Identity subtype
|
||||
@ -1550,7 +1569,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_none() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
let none = KclValue::KclNone {
|
||||
value: crate::parsing::ast::types::KclNone::new(),
|
||||
meta: Vec::new(),
|
||||
@ -1608,7 +1627,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_record() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
|
||||
let obj0 = KclValue::Object {
|
||||
value: HashMap::new(),
|
||||
@ -1690,7 +1709,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_array() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
|
||||
let hom_arr = KclValue::HomArray {
|
||||
value: vec![
|
||||
@ -1843,7 +1862,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_union() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
|
||||
// Subtyping smaller unions
|
||||
assert!(RuntimeType::Union(vec![]).subtype(&RuntimeType::Union(vec![
|
||||
@ -1894,7 +1913,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_axes() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
|
||||
// Subtyping
|
||||
assert!(RuntimeType::Primitive(PrimitiveType::Axis2d).subtype(&RuntimeType::Primitive(PrimitiveType::Axis2d)));
|
||||
@ -2009,7 +2028,7 @@ mod test {
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_numeric() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
|
||||
let count = KclValue::Number {
|
||||
value: 1.0,
|
||||
@ -2237,7 +2256,7 @@ d = cos(30)
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_nested_array() {
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock().await);
|
||||
let mut exec_state = ExecState::new(&crate::ExecutorContext::new_mock(None).await);
|
||||
|
||||
let mixed1 = KclValue::HomArray {
|
||||
value: vec![
|
||||
|
@ -99,7 +99,7 @@ pub use lsp::{
|
||||
kcl::{Backend as KclLspBackend, Server as KclLspServerSubCommand},
|
||||
};
|
||||
pub use modules::ModuleId;
|
||||
pub use parsing::ast::types::{FormatOptions, NodePath};
|
||||
pub use parsing::ast::types::{FormatOptions, NodePath, Step as NodePathStep};
|
||||
pub use settings::types::{project::ProjectConfiguration, Configuration, UnitLength};
|
||||
pub use source_range::SourceRange;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@ -250,8 +250,8 @@ impl Program {
|
||||
self.ast.lint(rule)
|
||||
}
|
||||
|
||||
pub fn node_path_from_range(&self, range: SourceRange) -> Option<NodePath> {
|
||||
NodePath::from_range(&self.ast, range)
|
||||
pub fn node_path_from_range(&self, cached_body_items: usize, range: SourceRange) -> Option<NodePath> {
|
||||
NodePath::from_range(&self.ast, cached_body_items, range)
|
||||
}
|
||||
|
||||
pub fn recast(&self) -> String {
|
||||
|
@ -4220,8 +4220,8 @@ sketch001 = startSketchOn(XY)
|
||||
result,
|
||||
vec![tower_lsp::lsp_types::ColorInformation {
|
||||
range: tower_lsp::lsp_types::Range {
|
||||
start: tower_lsp::lsp_types::Position { line: 4, character: 24 },
|
||||
end: tower_lsp::lsp_types::Position { line: 4, character: 33 },
|
||||
start: tower_lsp::lsp_types::Position { line: 4, character: 25 },
|
||||
end: tower_lsp::lsp_types::Position { line: 4, character: 32 },
|
||||
},
|
||||
color: tower_lsp::lsp_types::Color {
|
||||
red: 1.0,
|
||||
@ -4272,8 +4272,8 @@ sketch001 = startSketchOn(XY)
|
||||
result,
|
||||
vec![tower_lsp::lsp_types::ColorInformation {
|
||||
range: tower_lsp::lsp_types::Range {
|
||||
start: tower_lsp::lsp_types::Position { line: 4, character: 24 },
|
||||
end: tower_lsp::lsp_types::Position { line: 4, character: 33 },
|
||||
start: tower_lsp::lsp_types::Position { line: 4, character: 25 },
|
||||
end: tower_lsp::lsp_types::Position { line: 4, character: 32 },
|
||||
},
|
||||
color: tower_lsp::lsp_types::Color {
|
||||
red: 1.0,
|
||||
@ -4291,8 +4291,8 @@ sketch001 = startSketchOn(XY)
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
},
|
||||
range: tower_lsp::lsp_types::Range {
|
||||
start: tower_lsp::lsp_types::Position { line: 4, character: 24 },
|
||||
end: tower_lsp::lsp_types::Position { line: 4, character: 33 },
|
||||
start: tower_lsp::lsp_types::Position { line: 4, character: 25 },
|
||||
end: tower_lsp::lsp_types::Position { line: 4, character: 32 },
|
||||
},
|
||||
color: tower_lsp::lsp_types::Color {
|
||||
red: 1.0,
|
||||
@ -4316,3 +4316,64 @@ sketch001 = startSketchOn(XY)
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_kcl_lsp_diagnostic_compilation_warnings() {
|
||||
let server = kcl_lsp_server(false).await.unwrap();
|
||||
|
||||
// Send open file.
|
||||
server
|
||||
.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentItem {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
language_id: "kcl".to_string(),
|
||||
version: 1,
|
||||
text: r#"foo = 42
|
||||
@settings(defaultLengthUnit = mm)"#
|
||||
.to_string(),
|
||||
},
|
||||
})
|
||||
.await;
|
||||
|
||||
// Send diagnostics request.
|
||||
let diagnostics = server
|
||||
.diagnostic(tower_lsp::lsp_types::DocumentDiagnosticParams {
|
||||
text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
|
||||
uri: "file:///test.kcl".try_into().unwrap(),
|
||||
},
|
||||
partial_result_params: Default::default(),
|
||||
work_done_progress_params: Default::default(),
|
||||
identifier: None,
|
||||
previous_result_id: None,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Check the diagnostics.
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReportResult::Report(diagnostics) = diagnostics {
|
||||
if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
|
||||
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
|
||||
assert_eq!(
|
||||
diagnostics.full_document_diagnostic_report.items[0],
|
||||
tower_lsp::lsp_types::Diagnostic {
|
||||
range: tower_lsp::lsp_types::Range {
|
||||
start: tower_lsp::lsp_types::Position { line: 0, character: 8 },
|
||||
end: tower_lsp::lsp_types::Position { line: 1, character: 33 },
|
||||
},
|
||||
severity: Some(tower_lsp::lsp_types::DiagnosticSeverity::WARNING),
|
||||
code: None,
|
||||
source: Some("kcl".to_string()),
|
||||
message: "Named attributes should appear before any declarations or expressions.\n\nBecause named attributes apply to the whole function or module, including code written before them, it can be confusing for readers to not have these attributes at the top of code blocks.".to_string(),
|
||||
related_information: None,
|
||||
tags: None,
|
||||
data: None,
|
||||
code_description: None,
|
||||
}
|
||||
);
|
||||
} else {
|
||||
panic!("Expected full diagnostics");
|
||||
}
|
||||
} else {
|
||||
panic!("Expected diagnostics");
|
||||
}
|
||||
}
|
||||
|
@ -96,6 +96,7 @@ pub(crate) fn read_std(mod_name: &str) -> Option<&'static str> {
|
||||
"solid" => Some(include_str!("../std/solid.kcl")),
|
||||
"units" => Some(include_str!("../std/units.kcl")),
|
||||
"array" => Some(include_str!("../std/array.kcl")),
|
||||
"sweep" => Some(include_str!("../std/sweep.kcl")),
|
||||
"transform" => Some(include_str!("../std/transform.kcl")),
|
||||
_ => None,
|
||||
}
|
||||
|
@ -11,7 +11,7 @@ use std::{
|
||||
|
||||
use anyhow::Result;
|
||||
use parse_display::{Display, FromStr};
|
||||
pub use path::NodePath;
|
||||
pub use path::{NodePath, Step};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower_lsp::lsp_types::{
|
||||
@@ -186,7 +186,7 @@ impl<T> Node<T> {
self.comment_start = start;
}

pub fn map_ref<'a, U: 'a>(&'a self, f: fn(&'a T) -> U) -> Node<U> {
pub fn map_ref<'a, U: 'a>(&'a self, f: impl Fn(&'a T) -> U) -> Node<U> {
Node {
inner: f(&self.inner),
start: self.start,
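The widened bound accepts arbitrary closures rather than only function pointers, so a mapper may now capture local state. A hypothetical caller, assuming a `node: Node<Ident>` whose inner type has a `name: String` field (both assumptions for illustration):

    // Sketch only, not part of the diff; the closure captures `suffix`.
    let suffix = "_renamed";
    let mapped: Node<String> = node.map_ref(|ident| format!("{}{}", ident.name, suffix));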
@@ -438,8 +438,15 @@ impl Node<Program> {
let add_color = |literal: &Node<Literal>| {
// Check if the string is a color.
if let Some(c) = literal.value.is_color() {
let source_range = literal.as_source_range();
// We subtract 1 from either side because of the "'s in the literal.
let fixed_source_range = SourceRange::new(
source_range.start() + 1,
source_range.end() - 1,
source_range.module_id(),
);
let color = ColorInformation {
range: literal.as_source_range().to_lsp_range(code),
range: fixed_source_range.to_lsp_range(code),
color: tower_lsp::lsp_types::Color {
red: c.r,
green: c.g,
@@ -498,7 +505,11 @@ impl Node<Program> {
crate::walk::walk(self, |node: crate::walk::Node<'a>| {
match node {
crate::walk::Node::Literal(literal) => {
if literal.start == pos_start && literal.end == pos_end && literal.value.is_color().is_some() {
// Account for the quotes in the literal.
if (literal.start + 1) == pos_start
&& (literal.end - 1) == pos_end
&& literal.value.is_color().is_some()
{
found.replace(true);
return Ok(true);
}
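A concrete illustration of the offset, with assumed byte positions that are not from the diff: the reported color range now excludes the surrounding quotes, which is what the updated LSP color tests earlier in this changeset assert (character 24 -> 25 and 33 -> 32).

    // Hypothetical literal "#ff0000" spanning bytes 24..33, quotes included.
    let lit = SourceRange::new(24, 33, ModuleId::default());
    let color_span = SourceRange::new(lit.start() + 1, lit.end() - 1, lit.module_id());
    assert_eq!((color_span.start(), color_span.end()), (25, 32));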
@ -1187,7 +1198,7 @@ impl Expr {
|
||||
|
||||
pub fn ident_name(&self) -> Option<&str> {
|
||||
match self {
|
||||
Expr::Name(ident) => Some(&ident.name.name),
|
||||
Expr::Name(name) => name.local_ident().map(|n| n.inner),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@ -2371,7 +2382,7 @@ impl Name {
|
||||
|
||||
pub fn local_ident(&self) -> Option<Node<&str>> {
|
||||
if self.path.is_empty() && !self.abs_path {
|
||||
Some(self.name.map_ref(|n| &n.name))
|
||||
Some(self.name.map_ref(|n| &*n.name))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@@ -62,14 +62,21 @@ pub enum Step {
impl NodePath {
/// Given a program and a [`SourceRange`], return the path to the node that
/// contains the range.
pub(crate) fn from_range(program: &Node<Program>, range: SourceRange) -> Option<Self> {
Self::from_body(&program.body, range, NodePath::default())
pub(crate) fn from_range(program: &Node<Program>, cached_body_items: usize, range: SourceRange) -> Option<Self> {
Self::from_body(&program.body, cached_body_items, range, NodePath::default())
}

fn from_body(body: &[BodyItem], range: SourceRange, mut path: NodePath) -> Option<NodePath> {
fn from_body(
body: &[BodyItem],
cached_body_items: usize,
range: SourceRange,
mut path: NodePath,
) -> Option<NodePath> {
for (i, item) in body.iter().enumerate() {
if item.contains_range(&range) {
path.push(Step::ProgramBodyItem { index: i });
path.push(Step::ProgramBodyItem {
index: cached_body_items + i,
});
return Self::from_body_item(item, range, path);
}
}
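A hedged sketch of the intended offset under incremental execution; the `program` and `changed_range` values are placeholders. If three body items were served from cache, the first re-executed item resolves to overall index 3 in the full source file.

    // Sketch only, not part of the diff.
    let path = NodePath::from_range(&program.ast, 3, changed_range).unwrap();
    assert_eq!(path.steps[0], Step::ProgramBodyItem { index: 3 });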
@ -262,7 +269,7 @@ impl NodePath {
|
||||
}
|
||||
if node.then_val.contains_range(&range) {
|
||||
path.push(Step::IfExpressionThen);
|
||||
return Self::from_body(&node.then_val.body, range, path);
|
||||
return Self::from_body(&node.then_val.body, 0, range, path);
|
||||
}
|
||||
for else_if in &node.else_ifs {
|
||||
if else_if.contains_range(&range) {
|
||||
@ -273,14 +280,14 @@ impl NodePath {
|
||||
}
|
||||
if else_if.then_val.contains_range(&range) {
|
||||
path.push(Step::IfExpressionElseIfBody);
|
||||
return Self::from_body(&else_if.then_val.body, range, path);
|
||||
return Self::from_body(&else_if.then_val.body, 0, range, path);
|
||||
}
|
||||
return Some(path);
|
||||
}
|
||||
}
|
||||
if node.final_else.contains_range(&range) {
|
||||
path.push(Step::IfExpressionElse);
|
||||
return Self::from_body(&node.final_else.body, range, path);
|
||||
return Self::from_body(&node.final_else.body, 0, range, path);
|
||||
}
|
||||
}
|
||||
Expr::LabelledExpression(node) => {
|
||||
@ -345,7 +352,7 @@ mod tests {
|
||||
// fn cube(sideLength, center) {
|
||||
// ^^^^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(38, 42)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(38, 42)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![Step::ProgramBodyItem { index: 0 }, Step::VariableDeclarationDeclaration],
|
||||
}
|
||||
@ -353,7 +360,7 @@ mod tests {
|
||||
// fn cube(sideLength, center) {
|
||||
// ^^^^^^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(55, 61)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(55, 61)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![
|
||||
Step::ProgramBodyItem { index: 0 },
|
||||
@ -366,7 +373,7 @@ mod tests {
|
||||
// |> line(endAbsolute = p1)
|
||||
// ^^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(293, 295)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(293, 295)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![
|
||||
Step::ProgramBodyItem { index: 0 },
|
||||
@ -383,7 +390,7 @@ mod tests {
|
||||
// myCube = cube(sideLength = 40, center = [0, 0])
|
||||
// ^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(485, 486)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(485, 486)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![
|
||||
Step::ProgramBodyItem { index: 1 },
|
||||
|
@@ -2729,6 +2729,17 @@ fn ty(i: &mut TokenSlice) -> PResult<Token> {
keyword(i, "type")
}

fn any_keyword(i: &mut TokenSlice) -> PResult<Token> {
any.try_map(|token: Token| match token.token_type {
TokenType::Keyword => Ok(token),
_ => Err(CompilationError::fatal(
token.as_source_range(),
"expected some reserved keyword".to_owned(),
)),
})
.parse_next(i)
}

fn keyword(i: &mut TokenSlice, expected: &str) -> PResult<Token> {
any.try_map(|token: Token| match token.token_type {
TokenType::Keyword if token.value == expected => Ok(token),
@ -3143,12 +3154,14 @@ fn fn_call_kw(i: &mut TokenSlice) -> PResult<Node<CallExpressionKw>> {
|
||||
NonCode(Node<NonCodeNode>),
|
||||
LabeledArg(LabeledArg),
|
||||
UnlabeledArg(Expr),
|
||||
Keyword(Token),
|
||||
}
|
||||
let initial_unlabeled_arg = opt((expression, comma, opt(whitespace)).map(|(arg, _, _)| arg)).parse_next(i)?;
|
||||
let args: Vec<_> = repeat(
|
||||
0..,
|
||||
alt((
|
||||
terminated(non_code_node.map(ArgPlace::NonCode), whitespace),
|
||||
terminated(any_keyword.map(ArgPlace::Keyword), whitespace),
|
||||
terminated(labeled_argument, labeled_arg_separator).map(ArgPlace::LabeledArg),
|
||||
expression.map(ArgPlace::UnlabeledArg),
|
||||
)),
|
||||
@ -3164,6 +3177,18 @@ fn fn_call_kw(i: &mut TokenSlice) -> PResult<Node<CallExpressionKw>> {
|
||||
ArgPlace::LabeledArg(x) => {
|
||||
args.push(x);
|
||||
}
|
||||
ArgPlace::Keyword(kw) => {
|
||||
return Err(ErrMode::Cut(
|
||||
CompilationError::fatal(
|
||||
SourceRange::from(kw.clone()),
|
||||
format!(
|
||||
"`{}` is not the name of an argument (it's a reserved keyword)",
|
||||
kw.value
|
||||
),
|
||||
)
|
||||
.into(),
|
||||
));
|
||||
}
|
||||
ArgPlace::UnlabeledArg(arg) => {
|
||||
let followed_by_equals = peek((opt(whitespace), equals)).parse_next(i).is_ok();
|
||||
if followed_by_equals {
|
||||
@ -5055,6 +5080,30 @@ bar = 1
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sensible_error_when_using_keyword_as_arg_label() {
|
||||
for (i, program) in ["pow(2, fn = 8)"].into_iter().enumerate() {
|
||||
let tokens = crate::parsing::token::lex(program, ModuleId::default()).unwrap();
|
||||
let err = match fn_call_kw.parse(tokens.as_slice()) {
|
||||
Err(e) => e,
|
||||
Ok(ast) => {
|
||||
eprintln!("{ast:#?}");
|
||||
panic!("Expected this to error but it didn't");
|
||||
}
|
||||
};
|
||||
let cause = err.inner().cause.as_ref().unwrap();
|
||||
assert_eq!(
|
||||
cause.message, "`fn` is not the name of an argument (it's a reserved keyword)",
|
||||
"failed test {i}: {program}"
|
||||
);
|
||||
assert_eq!(
|
||||
cause.source_range.start(),
|
||||
program.find("fn").unwrap(),
|
||||
"failed test {i}: {program}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sensible_error_when_missing_rhs_of_obj_property() {
|
||||
for (i, program) in ["{x = 1, y =}"].into_iter().enumerate() {
|
||||
|
@ -21,7 +21,7 @@ struct Test {
|
||||
name: String,
|
||||
/// The name of the KCL file that's the entry point, e.g. "main.kcl", in the
|
||||
/// `input_dir`.
|
||||
entry_point: String,
|
||||
entry_point: PathBuf,
|
||||
/// Input KCL files are in this directory.
|
||||
input_dir: PathBuf,
|
||||
/// Expected snapshot output files are in this directory.
|
||||
@ -34,11 +34,16 @@ impl Test {
|
||||
fn new(name: &str) -> Self {
|
||||
Self {
|
||||
name: name.to_owned(),
|
||||
entry_point: "input.kcl".to_owned(),
|
||||
entry_point: Path::new("tests").join(name).join("input.kcl"),
|
||||
input_dir: Path::new("tests").join(name),
|
||||
output_dir: Path::new("tests").join(name),
|
||||
}
|
||||
}
|
||||
|
||||
/// Read in the entry point file and return its contents as a string.
|
||||
pub fn read(&self) -> String {
|
||||
std::fs::read_to_string(&self.entry_point).expect("Failed to read file: {filename}")
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_snapshot<F, R>(test: &Test, operation: &str, f: F)
|
||||
@ -66,19 +71,12 @@ where
|
||||
settings.bind(f);
|
||||
}
|
||||
|
||||
fn read<P>(filename: &str, dir: P) -> String
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
std::fs::read_to_string(dir.as_ref().join(filename)).expect("Failed to read file: {filename}")
|
||||
}
|
||||
|
||||
fn parse(test_name: &str) {
|
||||
parse_test(&Test::new(test_name));
|
||||
}
|
||||
|
||||
fn parse_test(test: &Test) {
|
||||
let input = read(&test.entry_point, &test.input_dir);
|
||||
let input = test.read();
|
||||
let tokens = crate::parsing::token::lex(&input, ModuleId::default()).unwrap();
|
||||
|
||||
// Parse the tokens into an AST.
|
||||
@ -98,7 +96,7 @@ async fn unparse(test_name: &str) {
|
||||
|
||||
async fn unparse_test(test: &Test) {
|
||||
// Parse into an AST
|
||||
let input = read(&test.entry_point, &test.input_dir);
|
||||
let input = test.read();
|
||||
let tokens = crate::parsing::token::lex(&input, ModuleId::default()).unwrap();
|
||||
let ast = crate::parsing::parse_tokens(tokens).unwrap();
|
||||
|
||||
@ -111,10 +109,9 @@ async fn unparse_test(test: &Test) {
|
||||
}));
|
||||
|
||||
// Check all the rest of the files in the directory.
|
||||
let entry_point = test.input_dir.join(&test.entry_point);
|
||||
let kcl_files = crate::unparser::walk_dir(&test.input_dir).await.unwrap();
|
||||
// Filter out the entry point file.
|
||||
let kcl_files = kcl_files.into_iter().filter(|f| f != &entry_point);
|
||||
let kcl_files = kcl_files.into_iter().filter(|f| f != &test.entry_point);
|
||||
let futures = kcl_files
|
||||
.into_iter()
|
||||
.filter(|file| file.extension().is_some_and(|ext| ext == "kcl")) // We only care about kcl
|
||||
@ -154,13 +151,11 @@ async fn execute(test_name: &str, render_to_png: bool) {
|
||||
}
|
||||
|
||||
async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
|
||||
let input = read(&test.entry_point, &test.input_dir);
|
||||
let input = test.read();
|
||||
let ast = crate::Program::parse_no_errs(&input).unwrap();
|
||||
|
||||
// Run the program.
|
||||
let exec_res =
|
||||
crate::test_server::execute_and_snapshot_ast(ast, Some(test.input_dir.join(&test.entry_point)), export_step)
|
||||
.await;
|
||||
let exec_res = crate::test_server::execute_and_snapshot_ast(ast, Some(test.entry_point.clone()), export_step).await;
|
||||
match exec_res {
|
||||
Ok((exec_state, env_ref, png, step)) => {
|
||||
let fail_path = test.output_dir.join("execution_error.snap");
|
||||
@ -314,7 +309,7 @@ fn assert_common_snapshots(
|
||||
// Change the snapshot suffix so that it is rendered as a Markdown file
|
||||
// in GitHub.
|
||||
// Ignore the cpu cooler for now because its being a little bitch.
|
||||
if test.name == "cpu_cooler" {
|
||||
if test.name != "cpu-cooler" {
|
||||
insta::assert_binary_snapshot!("artifact_graph_flowchart.md", flowchart.as_bytes().to_owned());
|
||||
}
|
||||
})
|
||||
@ -1669,7 +1664,6 @@ mod mike_stress_test {
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[ignore = "when kurt made the artifact graph lots of commands, this became super slow and sometimes the engine will just die, turn this back on when we can parallelize the simulation tests with snapshots deterministically"]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
@ -2749,7 +2743,6 @@ mod import_mesh_clone {
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[ignore = "turn on when katie fixes the mesh import"]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
@ -2772,6 +2765,7 @@ mod clone_w_fillets {
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[ignore] // turn on when https://github.com/KittyCAD/engine/pull/3380 is merged
|
||||
// There's also a test in clone.rs you need to turn too
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
@ -3071,3 +3065,151 @@ mod error_inside_fn_also_has_source_range_of_call_site_recursive {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod error_revolve_on_edge_get_edge {
|
||||
const TEST_NAME: &str = "error_revolve_on_edge_get_edge";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod subtract_with_pattern {
|
||||
const TEST_NAME: &str = "subtract_with_pattern";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod subtract_with_pattern_cut_thru {
|
||||
const TEST_NAME: &str = "subtract_with_pattern_cut_thru";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod sketch_on_face_union {
|
||||
const TEST_NAME: &str = "sketch_on_face_union";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod multi_target_csg {
|
||||
const TEST_NAME: &str = "multi_target_csg";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod revolve_colinear {
|
||||
const TEST_NAME: &str = "revolve-colinear";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[ignore] // until https://github.com/KittyCAD/engine/pull/3417 lands
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod subtract_regression07 {
|
||||
const TEST_NAME: &str = "subtract_regression07";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
|
@ -23,7 +23,7 @@ lazy_static::lazy_static! {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
fn parse(dir_name: &str, dir_path: &Path) {
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
let t = test(dir_name, dir_path.join("main.kcl"));
|
||||
let write_new = matches!(
|
||||
std::env::var("INSTA_UPDATE").as_deref(),
|
||||
Ok("auto" | "always" | "new" | "unseen")
|
||||
@ -37,7 +37,7 @@ fn parse(dir_name: &str, dir_path: &Path) {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
async fn unparse(dir_name: &str, dir_path: &Path) {
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
let t = test(dir_name, dir_path.join("main.kcl"));
|
||||
unparse_test(&t).await;
|
||||
}
|
||||
|
||||
@ -71,7 +71,7 @@ async fn unparse_test(test: &Test) {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
async fn kcl_test_execute(dir_name: &str, dir_path: &Path) {
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
let t = test(dir_name, dir_path.join("main.kcl"));
|
||||
super::execute_test(&t, true, true).await;
|
||||
}
|
||||
|
||||
@ -129,12 +129,22 @@ fn test_after_engine_generate_manifest() {
|
||||
generate_kcl_manifest(&INPUTS_DIR).unwrap();
|
||||
}
|
||||
|
||||
fn test(test_name: &str, entry_point: String) -> Test {
|
||||
fn test(test_name: &str, entry_point: std::path::PathBuf) -> Test {
|
||||
let parent = std::fs::canonicalize(entry_point.parent().unwrap()).unwrap();
|
||||
let inputs_dir = std::fs::canonicalize(INPUTS_DIR.as_path()).unwrap();
|
||||
let relative_path = parent.strip_prefix(inputs_dir).unwrap();
|
||||
let output_dir = std::fs::canonicalize(OUTPUTS_DIR.as_path()).unwrap();
|
||||
let relative_output_dir = output_dir.join(relative_path);
|
||||
|
||||
// Ensure the output directory exists.
|
||||
if !relative_output_dir.exists() {
|
||||
std::fs::create_dir_all(&relative_output_dir).unwrap();
|
||||
}
|
||||
Test {
|
||||
name: test_name.to_owned(),
|
||||
entry_point,
|
||||
input_dir: INPUTS_DIR.join(test_name),
|
||||
output_dir: OUTPUTS_DIR.join(test_name),
|
||||
entry_point: entry_point.clone(),
|
||||
input_dir: parent.to_path_buf(),
|
||||
output_dir: relative_output_dir,
|
||||
}
|
||||
}
|
||||
|
||||
@ -173,8 +183,9 @@ fn kcl_samples_inputs() -> Vec<Test> {
|
||||
eprintln!("Found KCL sample: {:?}", dir_name.to_string_lossy());
|
||||
// Look for the entry point inside the directory.
|
||||
let sub_dir = INPUTS_DIR.join(dir_name);
|
||||
let entry_point = if sub_dir.join("main.kcl").exists() {
|
||||
"main.kcl".to_owned()
|
||||
let main_kcl_path = sub_dir.join("main.kcl");
|
||||
let entry_point = if main_kcl_path.exists() {
|
||||
main_kcl_path
|
||||
} else {
|
||||
panic!("No main.kcl found in {:?}", sub_dir);
|
||||
};
|
||||
|
@ -59,7 +59,9 @@ impl Arg {
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct KwArgs {
|
||||
/// Unlabeled keyword args. Currently only the first arg can be unlabeled.
|
||||
pub unlabeled: Option<Arg>,
|
||||
/// If the argument was a local variable, then the first element of the tuple is its name
|
||||
/// which may be used to treat this arg as a labelled arg.
|
||||
pub unlabeled: Option<(Option<String>, Arg)>,
|
||||
/// Labeled args.
|
||||
pub labeled: IndexMap<String, Arg>,
|
||||
pub errors: Vec<Arg>,
|
||||
@ -100,7 +102,7 @@ impl TyF64 {
|
||||
t => unreachable!("expected length, found {t:?}"),
|
||||
};
|
||||
|
||||
assert_ne!(len, UnitLen::Unknown);
|
||||
debug_assert_ne!(len, UnitLen::Unknown);
|
||||
|
||||
len.adjust_to(self.n, units).0
|
||||
}
|
||||
@ -112,7 +114,7 @@ impl TyF64 {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
assert_ne!(angle, UnitAngle::Unknown);
|
||||
debug_assert_ne!(angle, UnitAngle::Unknown);
|
||||
|
||||
angle.adjust_to(self.n, UnitAngle::Degrees).0
|
||||
}
|
||||
@ -124,7 +126,7 @@ impl TyF64 {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
assert_ne!(angle, UnitAngle::Unknown);
|
||||
debug_assert_ne!(angle, UnitAngle::Unknown);
|
||||
|
||||
angle.adjust_to(self.n, UnitAngle::Radians).0
|
||||
}
|
||||
@ -342,6 +344,7 @@ impl Args {
|
||||
self.kw_args
|
||||
.unlabeled
|
||||
.as_ref()
|
||||
.map(|(_, a)| a)
|
||||
.or(self.args.first())
|
||||
.or(self.pipe_value.as_ref())
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ async fn call_map_closure(
     ctxt: &ExecutorContext,
 ) -> Result<KclValue, KclError> {
     let kw_args = KwArgs {
-        unlabeled: Some(Arg::new(input, source_range)),
+        unlabeled: Some((None, Arg::new(input, source_range))),
         labeled: Default::default(),
         errors: Vec::new(),
     };
@ -104,7 +104,7 @@ async fn call_reduce_closure(
     let mut labeled = IndexMap::with_capacity(1);
     labeled.insert("accum".to_string(), Arg::new(accum, source_range));
     let kw_args = KwArgs {
-        unlabeled: Some(Arg::new(elem, source_range)),
+        unlabeled: Some((None, Arg::new(elem, source_range))),
         labeled,
         errors: Vec::new(),
     };

@ -161,6 +161,7 @@ async fn fix_tags_and_references(
         },
         exec_state,
         args,
+        None,
     )
     .await?;

@ -177,25 +178,6 @@ async fn get_old_new_child_map(
     exec_state: &mut ExecState,
     args: &Args,
 ) -> Result<HashMap<uuid::Uuid, uuid::Uuid>> {
-    // Get the new geometries entity ids.
-    let response = args
-        .send_modeling_cmd(
-            exec_state.next_uuid(),
-            ModelingCmd::from(mcmd::EntityGetAllChildUuids {
-                entity_id: new_geometry_id,
-            }),
-        )
-        .await?;
-    let OkWebSocketResponseData::Modeling {
-        modeling_response:
-            OkModelingCmdResponse::EntityGetAllChildUuids(EntityGetAllChildUuids {
-                entity_ids: new_entity_ids,
-            }),
-    } = response
-    else {
-        anyhow::bail!("Expected EntityGetAllChildUuids response, got: {:?}", response);
-    };
-
     // Get the old geometries entity ids.
     let response = args
         .send_modeling_cmd(
@ -215,6 +197,25 @@ async fn get_old_new_child_map(
         anyhow::bail!("Expected EntityGetAllChildUuids response, got: {:?}", response);
     };

+    // Get the new geometries entity ids.
+    let response = args
+        .send_modeling_cmd(
+            exec_state.next_uuid(),
+            ModelingCmd::from(mcmd::EntityGetAllChildUuids {
+                entity_id: new_geometry_id,
+            }),
+        )
+        .await?;
+    let OkWebSocketResponseData::Modeling {
+        modeling_response:
+            OkModelingCmdResponse::EntityGetAllChildUuids(EntityGetAllChildUuids {
+                entity_ids: new_entity_ids,
+            }),
+    } = response
+    else {
+        anyhow::bail!("Expected EntityGetAllChildUuids response, got: {:?}", response);
+    };
+
     // Create a map of old entity ids to new entity ids.
     Ok(HashMap::from_iter(
         old_entity_ids

@ -14,7 +14,7 @@ use super::{args::TyF64, DEFAULT_TOLERANCE};
 use crate::{
     errors::{KclError, KclErrorDetails},
     execution::{types::RuntimeType, ExecState, KclValue, Solid},
-    std::Args,
+    std::{patterns::GeometryTrait, Args},
 };

 /// Union two or more solids into a single solid.
@ -123,7 +123,7 @@ pub(crate) async fn inner_union(
     let solid_out_id = exec_state.next_uuid();

     let mut solid = solids[0].clone();
-    solid.id = solid_out_id;
+    solid.set_id(solid_out_id);
     let mut new_solids = vec![solid.clone()];

     if args.ctx.no_engine_commands().await {
@ -155,7 +155,7 @@ pub(crate) async fn inner_union(

     // If we have more solids, set those as well.
     if !extra_solid_ids.is_empty() {
-        solid.id = extra_solid_ids[0];
+        solid.set_id(extra_solid_ids[0]);
         new_solids.push(solid.clone());
     }

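The definition of `GeometryTrait` isn't part of this diff; judging only from the call sites, it appears to provide a `set_id` setter so the boolean operations can rewrite a cloned solid's id in one place instead of assigning `solid.id` directly, which also lets any derived ids stay in sync inside the setter. A rough sketch of that assumed shape:

```rust
use uuid::Uuid;

// Assumed shape only; the real trait lives in crate::std::patterns and is not shown here.
trait GeometryTrait {
    fn set_id(&mut self, id: Uuid);
}

// Minimal stand-in for Solid, just enough to exercise the call pattern used above.
struct Solid {
    id: Uuid,
}

impl GeometryTrait for Solid {
    fn set_id(&mut self, id: Uuid) {
        // The real implementation may update related ids too; here we only store the primary id.
        self.id = id;
    }
}

fn main() {
    let mut solid = Solid { id: Uuid::from_u128(1) };
    let solid_out_id = Uuid::from_u128(2);
    solid.set_id(solid_out_id);
    assert_eq!(solid.id, solid_out_id);
}
```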
@ -249,7 +249,7 @@ pub(crate) async fn inner_intersect(
     let solid_out_id = exec_state.next_uuid();

     let mut solid = solids[0].clone();
-    solid.id = solid_out_id;
+    solid.set_id(solid_out_id);
     let mut new_solids = vec![solid.clone()];

     if args.ctx.no_engine_commands().await {
@ -281,7 +281,7 @@ pub(crate) async fn inner_intersect(

     // If we have more solids, set those as well.
     if !extra_solid_ids.is_empty() {
-        solid.id = extra_solid_ids[0];
+        solid.set_id(extra_solid_ids[0]);
         new_solids.push(solid.clone());
     }

@ -293,20 +293,6 @@ pub async fn subtract(exec_state: &mut ExecState, args: Args) -> Result<KclValue
     let solids: Vec<Solid> = args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::solids(), exec_state)?;
     let tools: Vec<Solid> = args.get_kw_arg_typed("tools", &RuntimeType::solids(), exec_state)?;

-    if solids.len() > 1 {
-        return Err(KclError::UndefinedValue(KclErrorDetails {
-            message: "Only one solid is allowed for a subtract operation, currently.".to_string(),
-            source_ranges: vec![args.source_range],
-        }));
-    }
-
-    if tools.len() > 1 {
-        return Err(KclError::UndefinedValue(KclErrorDetails {
-            message: "Only one tool is allowed for a subtract operation, currently.".to_string(),
-            source_ranges: vec![args.source_range],
-        }));
-    }
-
     let tolerance: Option<TyF64> = args.get_kw_arg_opt_typed("tolerance", &RuntimeType::length(), exec_state)?;

     let solids = inner_subtract(solids, tools, tolerance, exec_state, args).await?;
@ -385,7 +371,7 @@ pub(crate) async fn inner_subtract(
     let solid_out_id = exec_state.next_uuid();

     let mut solid = solids[0].clone();
-    solid.id = solid_out_id;
+    solid.set_id(solid_out_id);
     let mut new_solids = vec![solid.clone()];

     if args.ctx.no_engine_commands().await {
@ -419,7 +405,7 @@ pub(crate) async fn inner_subtract(

     // If we have more solids, set those as well.
     if !extra_solid_ids.is_empty() {
-        solid.id = extra_solid_ids[0];
+        solid.set_id(extra_solid_ids[0]);
         new_solids.push(solid.clone());
     }

@ -220,6 +220,7 @@ async fn inner_extrude(
             },
             exec_state,
             &args,
+            None,
         )
         .await?,
     );
@ -234,6 +235,7 @@ pub(crate) struct NamedCapTags<'a> {
     pub end: Option<&'a TagNode>,
 }

+#[allow(clippy::too_many_arguments)]
 pub(crate) async fn do_post_extrude<'a>(
     sketch: &Sketch,
     #[cfg(feature = "artifact-graph")] solid_id: ArtifactId,
@ -242,6 +244,7 @@ pub(crate) async fn do_post_extrude<'a>(
     named_cap_tags: &'a NamedCapTags<'a>,
     exec_state: &mut ExecState,
     args: &Args,
+    edge_id: Option<Uuid>,
 ) -> Result<Solid, KclError> {
     // Bring the object to the front of the scene.
     // See: https://github.com/KittyCAD/modeling-app/issues/806
@ -251,7 +254,9 @@ pub(crate) async fn do_post_extrude<'a>(
     )
     .await?;

-    let any_edge_id = if let Some(edge_id) = sketch.mirror {
+    let any_edge_id = if let Some(id) = edge_id {
+        id
+    } else if let Some(edge_id) = sketch.mirror {
         edge_id
     } else {
         // The "get extrusion face info" API call requires *any* edge on the sketch being extruded.
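The new `edge_id` parameter takes precedence over the sketch's mirror edge when picking an edge for the "get extrusion face info" call. A standalone sketch of that resolution order, using a hypothetical `Sketch` stand-in (the real fallback walks the sketch's paths for any edge):

```rust
use uuid::Uuid;

// Hypothetical stand-in: just enough of a Sketch to show the precedence.
struct Sketch {
    mirror: Option<Uuid>,
    path_edge_ids: Vec<Uuid>,
}

/// An explicitly supplied edge id wins, then the sketch's mirror edge, then any edge on the sketch.
fn resolve_any_edge_id(edge_id: Option<Uuid>, sketch: &Sketch) -> Option<Uuid> {
    edge_id
        .or(sketch.mirror)
        .or_else(|| sketch.path_edge_ids.first().copied())
}

fn main() {
    let sketch = Sketch {
        mirror: Some(Uuid::from_u128(7)),
        path_edge_ids: vec![Uuid::from_u128(9)],
    };
    // Callers like inner_extrude and inner_loft currently pass None, so the mirror edge is used.
    assert_eq!(resolve_any_edge_id(None, &sketch), Some(Uuid::from_u128(7)));
    // A caller that knows a specific edge can override it.
    assert_eq!(
        resolve_any_edge_id(Some(Uuid::from_u128(3)), &sketch),
        Some(Uuid::from_u128(3))
    );
}
```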
@ -187,6 +187,7 @@ async fn inner_loft(
         },
         exec_state,
         &args,
+        None,
     )
     .await?,
     ))