Compare commits: nrc-no-ang ... codex/upda (29 commits)

5135badef7
416de9a9fb
da65426ddc
585b485852
e85f16ff9c
e7d2289a14
d35531758d
729e0a7949
620b7401aa
e3e67b00d5
49d4f8e5c3
47b159c605
c7b086fa69
203db79204
48a4fd8373
17eb84325f
ebf048478d
28a8cd2421
1506de92f5
8a03413643
f59b806a88
23a0085c78
a280a8c3f0
11620dfa6b
f6e26e0bab
f6b3a55cbf
74939e5cd6
9906c9947a
48d6a21f0a
.github/workflows/build-wasm.yml (new file, +56 lines)
@@ -0,0 +1,56 @@
name: Build WASM

on:
  workflow_call:

permissions:
  contents: read

jobs:
  npm-build-wasm:
    runs-on: runs-on=${{ github.run_id }}/family=i7ie.2xlarge/image=ubuntu22-full-x64
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-node@v4
        with:
          node-version-file: '.nvmrc'
          cache: 'npm'

      - name: Install dependencies
        run: npm install

      - name: Use correct Rust toolchain
        shell: bash
        run: |
          [ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./

      - name: Install rust
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          cache: false # configured below

      - uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
        with:
          tool: wasm-pack

      - name: Use Rust cache
        uses: Swatinem/rust-cache@v2
        with:
          workspaces: './rust'

      - name: Build Wasm
        shell: bash
        run: npm run build:wasm

      - uses: actions/upload-artifact@v4
        with:
          name: prepared-wasm
          path: |
            rust/kcl-wasm-lib/pkg/kcl_wasm_lib*

      - uses: actions/upload-artifact@v4
        with:
          name: prepared-ts-rs-bindings
          path: |
            rust/kcl-lib/bindings/*
.github/workflows/cargo-test.yml (50 lines changed)
@@ -155,7 +155,7 @@ jobs:
        shell: bash
        run: |
          [ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
      - name: Install rust
      - name: Install Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          cache: false # Configured below.
@@ -190,6 +190,54 @@ jobs:
          TAB_API_KEY: ${{ secrets.TAB_API_KEY }}
          CI_COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
          CI_PR_NUMBER: ${{ github.event.pull_request.number }}
  run-internal-kcl-samples:
    name: cargo test (internal-kcl-samples)
    runs-on:
      - runs-on=${{ github.run_id }}
      - runner=32cpu-linux-x64
      - extras=s3-cache
    steps:
      - uses: runs-on/action@v1
      - uses: actions/create-github-app-token@v1
        id: app-token
        with:
          app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
          private-key: ${{ secrets.MODELING_APP_GH_APP_PRIVATE_KEY }}
          owner: ${{ github.repository_owner }}
      - uses: actions/checkout@v4
        with:
          token: ${{ steps.app-token.outputs.token }}
      - name: Use correct Rust toolchain
        shell: bash
        run: |
          [ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
      - name: Install Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          cache: false # Configured below.
      - name: Start Vector
        run: .github/ci-cd-scripts/start-vector-ubuntu.sh
        env:
          GH_ACTIONS_AXIOM_TOKEN: ${{ secrets.GH_ACTIONS_AXIOM_TOKEN }}
          OS_NAME: ${{ env.OS_NAME }}
      - uses: taiki-e/install-action@nextest
      - name: Download internal KCL samples
        run: git clone --depth=1 https://x-access-token:${{ secrets.GH_PAT_KCL_SAMPLES_INTERNAL }}@github.com/KittyCAD/kcl-samples-internal public/kcl-samples/internal
      - name: Run tests
        shell: bash
        run: |-
          cd rust/kcl-lib
          cargo nextest run \
            --retries=10 --no-fail-fast --features artifact-graph --profile=ci \
            internal \
            2>&1 | tee /tmp/github-actions.log
        env:
          TWENTY_TWENTY: overwrite
          INSTA_UPDATE: always
          EXPECTORATE: overwrite
          KITTYCAD_API_TOKEN: ${{secrets.KITTYCAD_API_TOKEN_DEV}}
          ZOO_HOST: https://api.dev.zoo.dev
          MODELING_APP_INTERNAL_SAMPLES_SECRET: ${{secrets.MODELING_APP_INTERNAL_SAMPLES_SECRET}}
  run-wasm-tests:
    name: Run wasm tests
    strategy:
.github/workflows/kcl-language-server.yml (61 lines changed)
@@ -21,14 +21,11 @@ on:
|
||||
- '**.rs'
|
||||
- .github/workflows/kcl-language-server.yml
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
@ -38,10 +35,9 @@ env:
|
||||
MACOSX_DEPLOYMENT_TARGET: 10.15
|
||||
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
|
||||
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: vscode tests
|
||||
name: kcl-language-server (vscode tests)
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@ -77,8 +73,7 @@ jobs:
|
||||
include:
|
||||
- os: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
code-target:
|
||||
win32-x64
|
||||
code-target: win32-x64
|
||||
#- os: windows-latest
|
||||
#target: i686-pc-windows-msvc
|
||||
#code-target:
|
||||
@ -88,8 +83,7 @@ jobs:
|
||||
#code-target: win32-arm64
|
||||
- os: ubuntu-latest
|
||||
target: x86_64-unknown-linux-gnu
|
||||
code-target:
|
||||
linux-x64
|
||||
code-target: linux-x64
|
||||
#- os: ubuntu-latest
|
||||
#target: aarch64-unknown-linux-musl
|
||||
#code-target: linux-arm64
|
||||
@ -105,41 +99,33 @@ jobs:
|
||||
- os: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
code-target: darwin-arm64
|
||||
|
||||
name: build-release (${{ matrix.target }})
|
||||
name: kcl-language-server build-release (${{ matrix.target }})
|
||||
runs-on: ${{ matrix.os }}
|
||||
container: ${{ matrix.container }}
|
||||
|
||||
env:
|
||||
RA_TARGET: ${{ matrix.target }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: ${{ env.FETCH_DEPTH }}
|
||||
|
||||
- name: Use correct Rust toolchain
|
||||
shell: bash
|
||||
run: |
|
||||
rm rust/rust-toolchain.toml
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
cache: rust
|
||||
components: rust-src
|
||||
target: ${{ matrix.target }}
|
||||
|
||||
- name: Install Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- name: Update apt repositories
|
||||
if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' || matrix.os == 'ubuntu-latest'
|
||||
run: sudo apt-get update
|
||||
|
||||
- if: ${{ matrix.os == 'ubuntu-latest' }}
|
||||
name: Install deps
|
||||
shell: bash
|
||||
@ -164,64 +150,53 @@ jobs:
|
||||
zlib1g-dev
|
||||
|
||||
cargo install cross
|
||||
|
||||
- name: Install AArch64 target toolchain
|
||||
if: matrix.target == 'aarch64-unknown-linux-gnu'
|
||||
run: sudo apt-get install gcc-aarch64-linux-gnu
|
||||
|
||||
- name: Install ARM target toolchain
|
||||
if: matrix.target == 'arm-unknown-linux-gnueabihf'
|
||||
run: sudo apt-get install gcc-arm-linux-gnueabihf
|
||||
|
||||
- name: build
|
||||
run: |
|
||||
cd rust
|
||||
cargo kcl-language-server-release build --client-patch-version ${{ github.run_number }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
# npm will symlink which will cause issues w tarballing later
|
||||
yarn install
|
||||
|
||||
- name: Package Extension (release)
|
||||
if: startsWith(github.event.ref, 'refs/tags/')
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce package --yarn -o "../build/kcl-language-server-${{ matrix.code-target }}.vsix" --target ${{ matrix.code-target }}
|
||||
|
||||
- name: Package Extension (nightly)
|
||||
if: startsWith(github.event.ref, 'refs/tags/') == false
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce package --yarn -o "../build/kcl-language-server-${{ matrix.code-target }}.vsix" --target ${{ matrix.code-target }} --pre-release
|
||||
|
||||
- name: remove server
|
||||
if: matrix.target == 'x86_64-unknown-linux-gnu'
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
rm -rf server
|
||||
|
||||
- name: Package Extension (no server, release)
|
||||
if: matrix.target == 'x86_64-unknown-linux-gnu' && startsWith(github.event.ref, 'refs/tags/')
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce package --yarn -o ../build/kcl-language-server-no-server.vsix
|
||||
|
||||
- name: Package Extension (no server, nightly)
|
||||
if: matrix.target == 'x86_64-unknown-linux-gnu' && startsWith(github.event.ref, 'refs/tags/') == false
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce package --yarn -o ../build/kcl-language-server-no-server.vsix --pre-release
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release-${{ matrix.target }}
|
||||
path: ./rust/build
|
||||
|
||||
build-release-x86_64-unknown-linux-musl:
|
||||
name: build-release (x86_64-unknown-linux-musl)
|
||||
name: kcl-language-server build-release (x86_64-unknown-linux-musl)
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
RA_TARGET: x86_64-unknown-linux-musl
|
||||
@ -231,7 +206,6 @@ jobs:
|
||||
image: alpine:latest
|
||||
volumes:
|
||||
- /usr/local/cargo/registry:/usr/local/cargo/registry
|
||||
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@ -245,55 +219,46 @@ jobs:
|
||||
nodejs \
|
||||
npm \
|
||||
yarn
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: ${{ env.FETCH_DEPTH }}
|
||||
|
||||
- name: Use correct Rust toolchain
|
||||
shell: bash
|
||||
run: |
|
||||
rm rust/rust-toolchain.toml
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
cache: rust
|
||||
components: rust-src
|
||||
target: ${{ matrix.target }}
|
||||
|
||||
- name: build
|
||||
run: |
|
||||
cd rust
|
||||
cargo kcl-language-server-release build --client-patch-version ${{ github.run_number }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
# npm will symlink which will cause issues w tarballing later
|
||||
yarn install
|
||||
|
||||
- name: Package Extension (release)
|
||||
if: startsWith(github.event.ref, 'refs/tags/')
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce package --yarn -o "../build/kcl-language-server-alpine-x64.vsix" --target alpine-x64
|
||||
|
||||
- name: Package Extension (release)
|
||||
if: startsWith(github.event.ref, 'refs/tags/') == false
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce package --yarn -o "../build/kcl-language-server-alpine-x64.vsix" --target alpine-x64 --pre-release
|
||||
|
||||
- name: Upload artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release-x86_64-unknown-linux-musl
|
||||
path: ./rust/build
|
||||
|
||||
publish:
|
||||
name: publish
|
||||
name: kcl-language-server (publish)
|
||||
runs-on: ubuntu-latest
|
||||
needs: ["build-release", "build-release-x86_64-unknown-linux-musl"]
|
||||
if: startsWith(github.event.ref, 'refs/tags')
|
||||
@ -301,22 +266,17 @@ jobs:
|
||||
contents: write
|
||||
steps:
|
||||
- run: echo "TAG=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
|
||||
|
||||
- run: 'echo "TAG: $TAG"'
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: ${{ env.FETCH_DEPTH }}
|
||||
|
||||
- name: Install Nodejs
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: ".nvmrc"
|
||||
|
||||
- run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
|
||||
- run: 'echo "HEAD_SHA: $HEAD_SHA"'
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: release-aarch64-apple-darwin
|
||||
@ -344,8 +304,7 @@ jobs:
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: release-x86_64-pc-windows-msvc
|
||||
path:
|
||||
rust/build
|
||||
path: rust/build
|
||||
#- uses: actions/download-artifact@v4
|
||||
#with:
|
||||
#name: release-i686-pc-windows-msvc
|
||||
@ -356,21 +315,18 @@ jobs:
|
||||
#name: release-aarch64-pc-windows-msvc
|
||||
#path: rust/build
|
||||
- run: ls -al ./rust/build
|
||||
|
||||
- name: Publish Release
|
||||
uses: ./.github/actions/github-release
|
||||
with:
|
||||
files: "rust/build/*"
|
||||
name: ${{ env.TAG }}
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: move files to dir for upload
|
||||
shell: bash
|
||||
run: |
|
||||
cd rust
|
||||
mkdir -p releases/language-server/${{ env.TAG }}
|
||||
cp -r build/* releases/language-server/${{ env.TAG }}
|
||||
|
||||
- name: "Authenticate to Google Cloud"
|
||||
uses: "google-github-actions/auth@v2.1.8"
|
||||
with:
|
||||
@ -385,15 +341,12 @@ jobs:
|
||||
with:
|
||||
path: rust/releases
|
||||
destination: dl.kittycad.io
|
||||
|
||||
- run: rm rust/build/kcl-language-server-no-server.vsix
|
||||
|
||||
- name: Publish Extension (Code Marketplace, release)
|
||||
# token from https://dev.azure.com/kcl-language-server/
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
npx vsce publish --pat ${{ secrets.VSCE_PAT }} --packagePath ../build/kcl-language-server-*.vsix
|
||||
|
||||
- name: Publish Extension (OpenVSX, release)
|
||||
run: |
|
||||
cd rust/kcl-language-server
|
||||
|
.github/workflows/kcl-python-bindings.yml (14 lines changed)
@@ -4,7 +4,6 @@
|
||||
# maturin generate-ci github
|
||||
#
|
||||
name: kcl-python-bindings
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@ -27,16 +26,14 @@ on:
|
||||
- '**.rs'
|
||||
- .github/workflows/kcl-python-bindings.yml
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
linux-x86_64:
|
||||
name: kcl-python-bindings (linux-x86_64)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@ -58,8 +55,8 @@ jobs:
|
||||
with:
|
||||
name: wheels-linux-x86_64
|
||||
path: rust/kcl-python-bindings/dist
|
||||
|
||||
windows:
|
||||
name: kcl-python-bindings (windows)
|
||||
runs-on: windows-16-cores
|
||||
strategy:
|
||||
matrix:
|
||||
@ -84,8 +81,8 @@ jobs:
|
||||
with:
|
||||
name: wheels-windows-${{ matrix.target }}
|
||||
path: rust/kcl-python-bindings/dist
|
||||
|
||||
macos:
|
||||
name: kcl-python-bindings (macos)
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
@ -110,8 +107,8 @@ jobs:
|
||||
with:
|
||||
name: wheels-macos-${{ matrix.target }}
|
||||
path: rust/kcl-python-bindings/dist
|
||||
|
||||
test:
|
||||
name: kcl-python-bindings (test)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@ -127,8 +124,8 @@ jobs:
|
||||
env:
|
||||
KITTYCAD_API_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
||||
ZOO_HOST: https://api.dev.zoo.dev
|
||||
|
||||
sdist:
|
||||
name: kcl-python-bindings (sdist)
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@ -151,7 +148,6 @@ jobs:
|
||||
with:
|
||||
name: wheels-sdist
|
||||
path: rust/kcl-python-bindings/dist
|
||||
|
||||
release:
|
||||
name: Release
|
||||
runs-on: ubuntu-latest
|
||||
|
.github/workflows/static-analysis.yml (167 lines changed)
@@ -28,53 +28,7 @@ jobs:
|
||||
- run: npm run fmt:check
|
||||
|
||||
npm-build-wasm:
|
||||
# Build the wasm blob once on the fastest runner.
|
||||
runs-on: runs-on=${{ github.run_id }}/family=i7ie.2xlarge/image=ubuntu22-full-x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Use correct Rust toolchain
|
||||
shell: bash
|
||||
run: |
|
||||
[ -e rust-toolchain.toml ] || cp rust/rust-toolchain.toml ./
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
cache: false # Configured below.
|
||||
|
||||
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
|
||||
with:
|
||||
tool: wasm-pack
|
||||
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: './rust'
|
||||
|
||||
- name: Build Wasm
|
||||
shell: bash
|
||||
run: npm run build:wasm
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: prepared-wasm
|
||||
path: |
|
||||
rust/kcl-wasm-lib/pkg/kcl_wasm_lib*
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: prepared-ts-rs-bindings
|
||||
path: |
|
||||
rust/kcl-lib/bindings/*
|
||||
uses: ./.github/workflows/build-wasm.yml
|
||||
|
||||
npm-tsc:
|
||||
runs-on: ubuntu-latest
|
||||
@ -173,122 +127,3 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
- name: Run codespell
|
||||
uses: crate-ci/typos@v1.32.0
|
||||
|
||||
npm-unit-test-kcl-samples:
|
||||
runs-on: ubuntu-latest
|
||||
needs: npm-build-wasm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'npm'
|
||||
|
||||
- run: npm install
|
||||
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
|
||||
with:
|
||||
tool: wasm-pack
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
|
||||
- name: Copy prepared wasm
|
||||
run: |
|
||||
ls -R prepared-wasm
|
||||
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
|
||||
mkdir rust/kcl-wasm-lib/pkg
|
||||
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
|
||||
|
||||
- name: Copy prepared ts-rs bindings
|
||||
run: |
|
||||
ls -R prepared-ts-rs-bindings
|
||||
mkdir rust/kcl-lib/bindings
|
||||
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
|
||||
|
||||
- run: npm run simpleserver:bg
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
|
||||
- name: Install Chromium Browser
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
run: npm run playwright install chromium --with-deps
|
||||
|
||||
- name: Download internal KCL samples
|
||||
run: git clone --depth=1 https://x-access-token:${{ secrets.GH_PAT_KCL_SAMPLES_INTERNAL }}@github.com/KittyCAD/kcl-samples-internal public/kcl-samples/internal
|
||||
|
||||
- name: Regenerate KCL samples manifest
|
||||
run: cd rust/kcl-lib && EXPECTORATE=overwrite cargo test generate_manifest
|
||||
|
||||
- name: Check public and internal KCL samples
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
run: npm run test:unit:kcl-samples
|
||||
env:
|
||||
VITE_KC_DEV_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
||||
|
||||
npm-unit-test:
|
||||
runs-on: ubuntu-latest
|
||||
needs: npm-build-wasm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'npm'
|
||||
|
||||
- run: npm install
|
||||
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
|
||||
with:
|
||||
tool: wasm-pack
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
|
||||
- name: Copy prepared wasm
|
||||
run: |
|
||||
ls -R prepared-wasm
|
||||
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
|
||||
mkdir rust/kcl-wasm-lib/pkg
|
||||
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
|
||||
|
||||
- name: Copy prepared ts-rs bindings
|
||||
run: |
|
||||
ls -R prepared-ts-rs-bindings
|
||||
mkdir rust/kcl-lib/bindings
|
||||
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
|
||||
|
||||
- run: npm run simpleserver:bg
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
|
||||
- name: Install Chromium Browser
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
run: npm run playwright install chromium --with-deps
|
||||
|
||||
- name: Run unit tests
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
run: xvfb-run -a npm run test:unit
|
||||
env:
|
||||
VITE_KC_DEV_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
||||
|
||||
- name: Check for changes
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
id: git-check
|
||||
run: |
|
||||
git add src/lang/std/artifactMapGraphs
|
||||
if git status src/lang/std/artifactMapGraphs | grep -q "Changes to be committed"
|
||||
then echo "modified=true" >> $GITHUB_OUTPUT
|
||||
else echo "modified=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Commit changes, if any
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' && steps.git-check.outputs.modified == 'true' }}
|
||||
run: |
|
||||
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git remote set-url origin https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git
|
||||
git fetch origin
|
||||
echo ${{ github.head_ref }}
|
||||
git checkout ${{ github.head_ref }}
|
||||
# TODO when webkit works on ubuntu remove the os part of the commit message
|
||||
git commit -am "Look at this (photo)Graph *in the voice of Nickelback*" || true
|
||||
git push
|
||||
git push origin ${{ github.head_ref }}
|
||||
|
.github/workflows/unit-tests.yml (new file, +124 lines)
@@ -0,0 +1,124 @@
|
||||
name: Unit Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
actions: read
|
||||
|
||||
jobs:
|
||||
npm-build-wasm:
|
||||
uses: ./.github/workflows/build-wasm.yml
|
||||
|
||||
npm-test-unit:
|
||||
runs-on: ubuntu-latest
|
||||
needs: npm-build-wasm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'npm'
|
||||
|
||||
- run: npm install
|
||||
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
|
||||
with:
|
||||
tool: wasm-pack
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
|
||||
- name: Copy prepared wasm
|
||||
run: |
|
||||
ls -R prepared-wasm
|
||||
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
|
||||
mkdir rust/kcl-wasm-lib/pkg
|
||||
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
|
||||
|
||||
- name: Copy prepared ts-rs bindings
|
||||
run: |
|
||||
ls -R prepared-ts-rs-bindings
|
||||
mkdir rust/kcl-lib/bindings
|
||||
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
|
||||
|
||||
- run: npm run simpleserver:bg
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
|
||||
- name: Install Chromium Browser
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
run: npm run playwright install chromium --with-deps
|
||||
|
||||
- name: Run unit tests
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
run: xvfb-run -a npm run test:unit
|
||||
env:
|
||||
VITE_KC_DEV_TOKEN: ${{ secrets.KITTYCAD_API_TOKEN_DEV }}
|
||||
|
||||
- name: Check for changes
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
|
||||
id: git-check
|
||||
run: |
|
||||
git add src/lang/std/artifactMapGraphs
|
||||
if git status src/lang/std/artifactMapGraphs | grep -q "Changes to be committed"
|
||||
then echo "modified=true" >> $GITHUB_OUTPUT
|
||||
else echo "modified=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Commit changes, if any
|
||||
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' && steps.git-check.outputs.modified == 'true' }}
|
||||
run: |
|
||||
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git remote set-url origin https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}.git
|
||||
git fetch origin
|
||||
echo ${{ github.head_ref }}
|
||||
git checkout ${{ github.head_ref }}
|
||||
# TODO when webkit works on ubuntu remove the os part of the commit message
|
||||
git commit -am "Look at this (photo)Graph *in the voice of Nickelback*" || true
|
||||
git push
|
||||
git push origin ${{ github.head_ref }}
|
||||
|
||||
npm-test-unit-components:
|
||||
runs-on: ubuntu-latest
|
||||
needs: npm-build-wasm
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'npm'
|
||||
|
||||
- run: npm install
|
||||
- uses: taiki-e/install-action@d4635f2de61c8b8104d59cd4aede2060638378cc
|
||||
with:
|
||||
tool: wasm-pack
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
|
||||
- name: Copy prepared wasm
|
||||
run: |
|
||||
ls -R prepared-wasm
|
||||
cp prepared-wasm/kcl_wasm_lib_bg.wasm public
|
||||
mkdir rust/kcl-wasm-lib/pkg
|
||||
cp prepared-wasm/kcl_wasm_lib* rust/kcl-wasm-lib/pkg
|
||||
|
||||
- name: Copy prepared ts-rs bindings
|
||||
run: |
|
||||
ls -R prepared-ts-rs-bindings
|
||||
mkdir rust/kcl-lib/bindings
|
||||
cp -r prepared-ts-rs-bindings/* rust/kcl-lib/bindings/
|
||||
|
||||
- name: Run component tests
|
||||
run: npm run test:unit:components
|
.gitignore (2 lines changed)
@@ -58,6 +58,8 @@ trace.zip
/public/kcl-samples/.github
/public/kcl-samples/screenshots/main.kcl
/public/kcl-samples/step/main.kcl
/public/kcl-samples/internal
/rust/kcl-lib/tests/kcl_samples/internal
/test-results/
/playwright-report/
/blob-report/
Makefile (1 line changed)
@@ -114,7 +114,6 @@ test-unit: install ## Run the unit tests
npm run test:unit:components
@ curl -fs localhost:3000 >/dev/null || ( echo "Error: localhost:3000 not available, 'make run-web' first" && exit 1 )
npm run test:unit
npm run test:unit:kcl-samples

.PHONY: test-e2e
test-e2e: test-e2e-$(TARGET)
@@ -29,6 +29,7 @@ THRE = "THRE" # Weird bug that wrongly detects THREEjs as a typo
nwo = "nwo" # don't know what this is about tbh
"ot" = "ot" # some abbreviation, idk what
"oe" = "oe" # some abbreviation, idk what
"colinear" = "colinear" # some engine shit, kidding

[default]
extend-ignore-identifiers-re = [
@@ -256720,7 +256720,7 @@
false
],
[
"// Create a spring by sweeping around a helix path.\n\n// Create a helix around the Z axis.\nhelixPath = helix(\n angleStart = 0,\n ccw = true,\n revolutions = 4,\n length = 10,\n radius = 5,\n axis = Z,\n)\n\n// Create a spring by sweeping around the helix path.\nspringSketch = startSketchOn(YZ)\n |> circle(center = [0, 0], radius = 1)\n |> sweep(path = helixPath, relativeTo = \"sketchPlane\")",
"// Create a spring by sweeping around a helix path.\n\n// Create a helix around the Z axis.\nhelixPath = helix(\n angleStart = 0,\n ccw = true,\n revolutions = 4,\n length = 10,\n radius = 5,\n axis = Z,\n)\n\n// Create a spring by sweeping around the helix path.\nspringSketch = startSketchOn(XZ)\n |> circle(center = [5, 0], radius = 1)\n |> sweep(path = helixPath)",
false
],
[
@@ -65,7 +65,9 @@ test(
await expect(engineErrorToastMessage).not.toBeVisible()

const successToastMessage = page.getByText(`Exported successfully`)
await expect(successToastMessage).toBeVisible()
await page.waitForTimeout(1_000)
const count = await successToastMessage.count()
await expect(count).toBeGreaterThanOrEqual(1)

// Check for the exported file
const firstFileFullPath = path.resolve(
@@ -134,7 +136,9 @@ test(
await expect(engineErrorToastMessage).not.toBeVisible()

const successToastMessage = page.getByText(`Exported successfully`)
await expect(successToastMessage).toBeVisible()
await page.waitForTimeout(1_000)
const count = await successToastMessage.count()
await expect(count).toBeGreaterThanOrEqual(1)
await expect(exportingToastMessage).not.toBeVisible()

// Check for the exported file=
@@ -197,18 +197,6 @@ test.describe(
await clickElectronNativeMenuById(tronApp, 'File.Export current part')
await cmdBar.expectCommandName('Export')
})
await test.step('Modeling.File.Share part via Zoo link', async () => {
await page.waitForTimeout(250)
await clickElectronNativeMenuById(
tronApp,
'File.Share part via Zoo link'
)
const textToCheck =
'Link copied to clipboard. Anyone who clicks this link will get a copy of this file. Share carefully!'
// Check if text appears anywhere in the page
const isTextVisible = page.getByText(textToCheck)
await expect(isTextVisible).toBeVisible({ timeout: 10000 })
})
await test.step('Modeling.File.Preferences.Project settings', async () => {
await page.waitForTimeout(250)
await clickElectronNativeMenuById(
@@ -6,7 +6,6 @@ test.describe('Onboarding tests', () => {
homePage,
toolbar,
editor,
scene,
tronApp,
}) => {
if (!tronApp) {
@@ -62,7 +61,6 @@ test.describe('Onboarding tests', () => {
await editor.expectEditor.toContain('@settings(defaultLengthUnit = in)', {
shouldNormalise: true,
})
await scene.connectionEstablished()
})

await test.step('Go home and verify we still see the tutorial button, then begin it.', async () => {
@@ -132,9 +130,7 @@ test.describe('Onboarding tests', () => {
})

await test.step('Dismiss the onboarding', async () => {
await postDismissToast.waitFor({ state: 'hidden' })
await page.keyboard.press('Escape')
await expect(postDismissToast).toBeVisible()
await expect(page.getByTestId('onboarding-content')).not.toBeVisible()
await expect.poll(() => page.url()).not.toContain('/onboarding')
})
@@ -162,13 +158,10 @@ test.describe('Onboarding tests', () => {
await test.step('Gets to the onboarding start', async () => {
await expect(toolbar.projectName).toContainText('tutorial-project')
await expect(tutorialWelcomeHeading).toBeVisible()
await scene.connectionEstablished()
})

await test.step('Dismiss the onboarding', async () => {
await postDismissToast.waitFor({ state: 'hidden' })
await page.keyboard.press('Escape')
await expect(postDismissToast).toBeVisible()
await expect(page.getByTestId('onboarding-content')).not.toBeVisible()
await expect.poll(() => page.url()).not.toContain('/onboarding')
})
@@ -99,6 +99,8 @@ test.describe('edit with AI example snapshots', () => {
await test.step('fire off edit prompt', async () => {
await cmdBar.captureTextToCadRequestSnapshot(test.info())
await cmdBar.openCmdBar('promptToEdit')
await page.waitForTimeout(100)
await cmdBar.progressCmdBar()
// being specific about the color with a hex means asserting pixel color is more stable
await page
.getByTestId('cmd-bar-arg-value')
@@ -88,6 +88,8 @@ test.describe('Prompt-to-edit tests', () => {

await test.step('fire off edit prompt', async () => {
await cmdBar.openCmdBar('promptToEdit')
await page.waitForTimeout(100)
await cmdBar.progressCmdBar()
// being specific about the color with a hex means asserting pixel color is more stable
await page
.getByTestId('cmd-bar-arg-value')
@@ -165,6 +167,8 @@ test.describe('Prompt-to-edit tests', () => {

await test.step('fire of bad prompt', async () => {
await cmdBar.openCmdBar('promptToEdit')
await page.waitForTimeout(100)
await cmdBar.progressCmdBar()
await page
.getByTestId('cmd-bar-arg-value')
.fill('ansheusha asnthuatshoeuhtaoetuhthaeu laughs in dvorak')
@@ -458,12 +458,10 @@ extrude002 = extrude(profile002, length = 150)

// Click the stl.
await expect(stlOption).toBeVisible()

await page.keyboard.press('Enter')

// Click the checkbox
await expect(submitButton).toBeVisible()

await page.keyboard.press('Enter')

// Find the toast.
@@ -471,11 +469,13 @@ extrude002 = extrude(profile002, length = 150)
await expect(exportingToastMessage).toBeVisible()

// Expect it to succeed.
await expect(exportingToastMessage).not.toBeVisible({ timeout: 15_000 })
await expect(exportingToastMessage).not.toBeVisible()
await expect(engineErrorToastMessage).not.toBeVisible()

const successToastMessage = page.getByText(`Exported successfully`)
await expect(successToastMessage).toBeVisible()
await page.waitForTimeout(1_000)
const count = await successToastMessage.count()
await expect(count).toBeGreaterThanOrEqual(1)
}
)
// We updated this test such that you can have multiple exports going at once.
[22 binary snapshot images updated; before/after sizes range from 43 KiB to 132 KiB and are mostly unchanged.]
@@ -551,11 +551,6 @@ export async function getUtils(page: Page, test_?: typeof test) {

createNewFile: async (name: string) => {
return test?.step(`Create a file named ${name}`, async () => {
// If the application is in the middle of connecting a stream
// then creating a new file won't work in the end.
await expect(
page.getByRole('button', { name: 'Start Sketch' })
).not.toBeDisabled()
await page.getByTestId('create-file-button').click()
await page.getByTestId('tree-input-field').fill(name)
await page.keyboard.press('Enter')
@@ -94,7 +94,6 @@
    "build:wasm:dev": "./scripts/build-wasm-dev.sh",
    "build:wasm:dev:windows": "powershell -ExecutionPolicy Bypass -File ./scripts/build-wasm-dev.ps1",
    "pretest": "npm run remove-importmeta",
    "test:rust": "(cd rust && just test && just lint)",
    "simpleserver": "npm run pretest && http-server ./public --cors -p 3000",
    "simpleserver:ci": "npm run pretest && http-server ./public --cors -p 3000 &",
    "simpleserver:bg": "npm run pretest && http-server ./public --cors -p 3000 &",
@@ -130,15 +129,14 @@
    "tronb:package:prod": "npm run tronb:vite:prod && electron-builder --config electron-builder.yml --publish always",
    "test-setup": "npm install && npm run build:wasm",
    "test": "vitest --mode development",
    "test:rust": "(cd rust && just test && just lint)",
    "test:snapshots": "PLATFORM=web NODE_ENV=development playwright test --config=playwright.config.ts --grep=@snapshot --trace=on --shard=1/1",
    "test:unit": "vitest run --mode development --exclude **/kclSamples.test.ts --exclude **/jest-component-unit-tests/*",
    "test:unit": "vitest run --mode development --exclude **/jest-component-unit-tests/*",
    "test:unit:components": "jest -c jest-component-unit-tests/jest.config.ts --rootDir jest-component-unit-tests/",
    "test:unit:kcl-samples": "vitest run --mode development ./src/lang/kclSamples.test.ts",
    "test:playwright:electron": "playwright test --config=playwright.electron.config.ts --grep-invert=@snapshot",
    "test:playwright:electron:local": "npm run tronb:vite:dev && playwright test --config=playwright.electron.config.ts --grep-invert=@snapshot --grep-invert=\"$(curl --silent https://test-analysis-bot.hawk-dinosaur.ts.net/projects/KittyCAD/modeling-app/tests/disabled/regex)\"",
    "test:playwright:electron:local-engine": "npm run tronb:vite:dev && playwright test --config=playwright.electron.config.ts --grep-invert='@snapshot|@skipLocalEngine' --grep-invert=\"$(curl --silent https://test-analysis-bot.hawk-dinosaur.ts.net/projects/KittyCAD/modeling-app/tests/disabled/regex)\"",
    "test:unit:local": "npm run simpleserver:bg && npm run test:unit; kill-port 3000",
    "test:unit:kcl-samples:local": "npm run simpleserver:bg && npm run test:unit:kcl-samples; kill-port 3000"
    "test:unit:local": "npm run simpleserver:bg && npm run test:unit; kill-port 3000"
  },
  "browserslist": {
    "production": [
rust/Cargo.lock (generated, 123 lines changed)
@@ -1815,7 +1815,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-bumper"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@ -1826,7 +1826,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-derive-docs"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"anyhow",
|
||||
@ -1845,8 +1845,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-directory-test-macro"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"convert_case",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.100",
|
||||
@ -1854,7 +1855,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-language-server"
|
||||
version = "0.2.73"
|
||||
version = "0.2.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@ -1875,7 +1876,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-language-server-release"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@ -1895,7 +1896,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-lib"
|
||||
version = "0.2.73"
|
||||
version = "0.2.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"approx 0.5.1",
|
||||
@ -1934,6 +1935,7 @@ dependencies = [
|
||||
"measurements",
|
||||
"miette",
|
||||
"mime_guess",
|
||||
"nalgebra-glm",
|
||||
"parse-display 0.10.0",
|
||||
"pretty_assertions",
|
||||
"pyo3",
|
||||
@ -1971,7 +1973,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-python-bindings"
|
||||
version = "0.3.73"
|
||||
version = "0.3.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"kcl-lib",
|
||||
@ -1986,7 +1988,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-test-server"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"hyper 0.14.32",
|
||||
@ -1999,7 +2001,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-to-core"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
@ -2013,7 +2015,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "kcl-wasm-lib"
|
||||
version = "0.1.73"
|
||||
version = "0.1.74"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bson",
|
||||
@ -2253,6 +2255,16 @@ dependencies = [
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matrixmultiply"
|
||||
version = "0.3.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a06de3016e9fae57a36fd14dba131fccf49f74b40b7fbdb472f96e361ec71a08"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"rawpointer",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "measurements"
|
||||
version = "0.11.0"
|
||||
@ -2373,6 +2385,33 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nalgebra"
|
||||
version = "0.33.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26aecdf64b707efd1310e3544d709c5c0ac61c13756046aaaba41be5c4f66a3b"
|
||||
dependencies = [
|
||||
"approx 0.5.1",
|
||||
"matrixmultiply",
|
||||
"num-complex",
|
||||
"num-rational",
|
||||
"num-traits 0.2.19",
|
||||
"simba",
|
||||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nalgebra-glm"
|
||||
version = "0.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e441f43bccdf40cb6bd4294321e6983c5bc7b9886112d19fd4c9813976b117e4"
|
||||
dependencies = [
|
||||
"approx 0.5.1",
|
||||
"nalgebra",
|
||||
"num-traits 0.2.19",
|
||||
"simba",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "newline-converter"
|
||||
version = "0.3.0"
|
||||
@ -2412,6 +2451,15 @@ dependencies = [
|
||||
"num-traits 0.2.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-complex"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
|
||||
dependencies = [
|
||||
"num-traits 0.2.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-conv"
|
||||
version = "0.1.0"
|
||||
@ -2442,6 +2490,17 @@ dependencies = [
|
||||
"num-modular",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-rational"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"num-integer",
|
||||
"num-traits 0.2.19",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.1.43"
|
||||
@ -2595,6 +2654,12 @@ dependencies = [
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
|
||||
|
||||
[[package]]
|
||||
name = "pbkdf2"
|
||||
version = "0.12.2"
|
||||
@ -3093,6 +3158,12 @@ dependencies = [
|
||||
"getrandom 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rawpointer"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
|
||||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.10.0"
|
||||
@ -3376,6 +3447,15 @@ version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "safe_arch"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96b02de82ddbe1b636e6170c21be622223aea188ef2e139be0a5b219ec215323"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
version = "1.0.6"
|
||||
@ -3631,6 +3711,19 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simba"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b3a386a501cd104797982c15ae17aafe8b9261315b5d07e3ec803f2ea26be0fa"
|
||||
dependencies = [
|
||||
"approx 0.5.1",
|
||||
"num-complex",
|
||||
"num-traits 0.2.19",
|
||||
"paste",
|
||||
"wide",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "simd-adler32"
|
||||
version = "0.3.7"
|
||||
@ -4731,6 +4824,16 @@ dependencies = [
|
||||
"rustls-pki-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wide"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41b5576b9a81633f3e8df296ce0063042a73507636cbe956c61133dd7034ab22"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
"safe_arch",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
|
@@ -1,7 +1,7 @@

[package]
name = "kcl-bumper"
version = "0.1.73"
version = "0.1.74"
edition = "2021"
repository = "https://github.com/KittyCAD/modeling-api"
rust-version = "1.76"
@@ -1,7 +1,7 @@
[package]
name = "kcl-derive-docs"
description = "A tool for generating documentation from Rust derive macros"
version = "0.1.73"
version = "0.1.74"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
@@ -1,7 +1,7 @@
[package]
name = "kcl-directory-test-macro"
description = "A tool for generating tests from a directory of kcl files"
version = "0.1.73"
version = "0.1.74"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
@@ -11,6 +11,7 @@ proc-macro = true
bench = false

[dependencies]
convert_case = "0.8.0"
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.96", features = ["full"] }
@@ -1,10 +1,13 @@
use std::fs;

use convert_case::Casing;
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{parse_macro_input, LitStr};

/// A macro that generates test functions for each directory within a given path.
/// To be included the test directory must have a main.kcl file.
/// This will also recursively search for directories within the given path.
///
/// # Example
///
@@ -45,7 +48,11 @@ pub fn test_all_dirs(attr: TokenStream, item: TokenStream) -> TokenStream {

    // Generate a test function for each directory
    let test_fns = dirs.iter().map(|(dir_name, dir_path)| {
        let test_fn_name = format_ident!("{}_{}", fn_name, sanitize_dir_name(dir_name));
        let relative_path = dir_path
            .strip_prefix(&path.to_string_lossy().to_string())
            .unwrap()
            .trim();
        let test_fn_name = format_ident!("{}_{}", fn_name, sanitize_dir_name(relative_path));
        let dir_name_str = dir_name.clone();
        let dir_path_str = dir_path.clone();

@@ -75,16 +82,26 @@ fn get_all_directories(path: &std::path::Path) -> Result<Vec<(String, String)>,

    for entry in fs::read_dir(path)? {
        let entry = entry?;
        let path = entry.path();
        let new_path = entry.path();

        if path.is_dir() && !IGNORE_DIRS.contains(&path.file_name().and_then(|name| name.to_str()).unwrap_or("")) {
            let dir_name = path
        if new_path.is_dir()
            && !IGNORE_DIRS.contains(&new_path.file_name().and_then(|name| name.to_str()).unwrap_or(""))
        {
            // Check if the directory contains a main.kcl file.
            let main_kcl_path = new_path.join("main.kcl");
            if !main_kcl_path.exists() {
                // Recurse into the directory.
                let sub_dirs = get_all_directories(&new_path)?;
                dirs.extend(sub_dirs);
                continue;
            }
            let dir_name = new_path
                .file_name()
                .and_then(|name| name.to_str())
                .unwrap_or("unknown")
                .to_string();

            let dir_path = path.to_str().unwrap_or("unknown").to_string();
            let dir_path = new_path.to_str().unwrap_or("unknown").to_string();

            dirs.push((dir_name, dir_path));
        }
@@ -95,10 +112,9 @@ fn get_all_directories(path: &std::path::Path) -> Result<Vec<(String, String)>,

/// Sanitize directory name to create a valid Rust identifier
fn sanitize_dir_name(name: &str) -> String {
    let name = name.replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "_");
    if name.chars().next().is_some_and(|c| c.is_numeric()) {
        format!("d_{}", name)
    } else {
        name
    }
    let binding = name
        .replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "_")
        .replace("/", "_");
    let name = binding.trim_start_matches('_').to_string();
    name.to_case(convert_case::Case::Snake)
}
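For orientation, the new sanitization can be exercised on its own. A minimal stand-alone sketch follows; the function body mirrors the diff, while the example directory names and the assumption that convert_case 0.8 is available are mine:

// Stand-alone sketch of the sanitization added above; only the inputs are hypothetical.
use convert_case::Casing;

fn sanitize_dir_name(name: &str) -> String {
    let binding = name
        .replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "_")
        .replace("/", "_");
    let name = binding.trim_start_matches('_').to_string();
    name.to_case(convert_case::Case::Snake)
}

fn main() {
    // Nested sample directories become flat, snake_case test-name suffixes,
    // e.g. roughly "gear_rack_helical" (exact word splitting is up to convert_case).
    println!("{}", sanitize_dir_name("gear-rack/helical"));
    println!("{}", sanitize_dir_name("/spur-gear"));
}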
@@ -1,6 +1,6 @@
[package]
name = "kcl-language-server-release"
version = "0.1.73"
version = "0.1.74"
edition = "2021"
authors = ["KittyCAD Inc <kcl@kittycad.io>"]
publish = false
@@ -2,7 +2,7 @@
name = "kcl-language-server"
description = "A language server for KCL."
authors = ["KittyCAD Inc <kcl@kittycad.io>"]
version = "0.2.73"
version = "0.2.74"
edition = "2021"
license = "MIT"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -1,7 +1,7 @@
[package]
name = "kcl-lib"
description = "KittyCAD Language implementation and tools"
version = "0.2.73"
version = "0.2.74"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
@@ -50,6 +50,7 @@ lazy_static = { workspace = true }
measurements = "0.11.0"
miette = { workspace = true }
mime_guess = "2.0.5"
nalgebra-glm = "0.19.0"
parse-display = "0.10.0"
pyo3 = { workspace = true, optional = true }
regex = "1.11.1"
@@ -1,5 +1,7 @@
//! Cache testing framework.

#[cfg(feature = "artifact-graph")]
use kcl_lib::NodePathStep;
use kcl_lib::{bust_cache, ExecError, ExecOutcome};
use kcmc::{each_cmd as mcmd, ModelingCmd};
use kittycad_modeling_cmds as kcmc;
@@ -259,18 +261,23 @@ extrude(profile001, length = 100)"#
#[tokio::test(flavor = "multi_thread")]
async fn kcl_test_cache_add_line_preserves_artifact_commands() {
    let code = r#"sketch001 = startSketchOn(XY)
  |> startProfile(at = [5.5229, 5.25217])
  |> line(end = [10.50433, -1.19122])
  |> line(end = [8.01362, -5.48731])
  |> line(end = [-1.02877, -6.76825])
  |> line(end = [-11.53311, 2.81559])
profile001 = startProfile(sketch001, at = [5.5, 5.25])
  |> line(end = [10.5, -1.19])
  |> line(end = [8, -5.5])
  |> line(end = [-1.02, -6.76])
  |> line(end = [-11.5, 2.8])
  |> close()
plane001 = offsetPlane(XY, offset = 20)
"#;
    // Use a new statement; don't extend the prior pipeline. This allows us to
    // detect a prefix.
    let code_with_extrude = code.to_owned()
        + r#"
extrude(sketch001, length = 4)
profile002 = startProfile(plane001, at = [0, 0])
  |> line(end = [0, 10])
  |> line(end = [10, 0])
  |> close()
extrude001 = extrude(profile001, length = 4)
"#;

    let result = cache_test(
@@ -305,6 +312,58 @@ extrude(sketch001, length = 4)
        first.artifact_graph.len(),
        second.artifact_graph.len()
    );
    // Make sure we have NodePaths referring to the old code.
    let graph = &second.artifact_graph;
    assert!(!graph.is_empty());
    for artifact in graph.values() {
        assert!(!artifact.code_ref().map(|c| c.node_path.is_empty()).unwrap_or(false));
        assert!(
            !artifact
                .face_code_ref()
                // TODO: This fails, but it shouldn't.
                // .map(|c| c.node_path.is_empty())
                // Allowing the NodePath to be empty if the SourceRange is [0,
                // 0] as a more lenient check.
                .map(|c| !c.range.is_synthetic() && c.node_path.is_empty())
                .unwrap_or(false),
            "artifact={:?}",
            artifact
        );
    }
}

#[cfg(feature = "artifact-graph")]
#[tokio::test(flavor = "multi_thread")]
async fn kcl_test_cache_add_offset_plane_computes_node_path() {
    let code = r#"sketch001 = startSketchOn(XY)
profile001 = startProfile(sketch001, at = [0, 0])
"#;
    let code_with_more = code.to_owned()
        + r#"plane001 = offsetPlane(XY, offset = 500)
"#;

    let result = cache_test(
        "add_offset_plane_preserves_artifact_commands",
        vec![
            Variation {
                code,
                other_files: vec![],
                settings: &Default::default(),
            },
            Variation {
                code: code_with_more.as_str(),
                other_files: vec![],
                settings: &Default::default(),
            },
        ],
    )
    .await;

    let second = &result.last().unwrap().2;

    let v = second.artifact_graph.values().collect::<Vec<_>>();
    let path_step = &v[2].code_ref().unwrap().node_path.steps[0];
    assert_eq!(*path_step, NodePathStep::ProgramBodyItem { index: 2 });
}

#[tokio::test(flavor = "multi_thread")]
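One note on the final assertion above, in case the index looks arbitrary: program body items are zero-indexed, so in the second variation the appended statement is body item 2. A throwaway sketch of that counting, using the identifiers from the test:

// 0: sketch001 = startSketchOn(XY)
// 1: profile001 = startProfile(sketch001, at = [0, 0])
// 2: plane001 = offsetPlane(XY, offset = 500)   <- the newly added statement
let body = ["sketch001", "profile001", "plane001"];
assert_eq!(body.iter().position(|v| *v == "plane001"), Some(2));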
[Binary snapshot images: two updated (28 KiB to 33 KiB, 48 KiB unchanged) and two added (19 KiB, 26 KiB).]
@@ -129,6 +129,7 @@ impl From<KclErrorWithOutputs> for KclError {
#[serde(rename_all = "camelCase")]
pub struct KclErrorWithOutputs {
    pub error: KclError,
    pub non_fatal: Vec<CompilationError>,
    #[cfg(feature = "artifact-graph")]
    pub operations: Vec<Operation>,
    #[cfg(feature = "artifact-graph")]
@@ -141,8 +142,10 @@ pub struct KclErrorWithOutputs {
}

impl KclErrorWithOutputs {
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        error: KclError,
        non_fatal: Vec<CompilationError>,
        #[cfg(feature = "artifact-graph")] operations: Vec<Operation>,
        #[cfg(feature = "artifact-graph")] artifact_commands: Vec<ArtifactCommand>,
        #[cfg(feature = "artifact-graph")] artifact_graph: ArtifactGraph,
@@ -152,6 +155,7 @@ impl KclErrorWithOutputs {
    ) -> Self {
        Self {
            error,
            non_fatal,
            #[cfg(feature = "artifact-graph")]
            operations,
            #[cfg(feature = "artifact-graph")]
@@ -166,6 +170,7 @@ impl KclErrorWithOutputs {
    pub fn no_outputs(error: KclError) -> Self {
        Self {
            error,
            non_fatal: Default::default(),
            #[cfg(feature = "artifact-graph")]
            operations: Default::default(),
            #[cfg(feature = "artifact-graph")]
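For callers the practical effect of the new field is small. A hedged usage sketch, assuming a `KclError` value named `error` is already in hand; the convenience constructor still works because it fills `non_fatal` with its default:

// Minimal usage sketch, not taken from the diff: wrap a fatal error when no
// compilation diagnostics were collected alongside it.
let wrapped = KclErrorWithOutputs::no_outputs(error);
assert!(wrapped.non_fatal.is_empty()); // non_fatal defaults to an empty Vec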
@@ -115,7 +115,7 @@
        seq.end()
    }

#[derive(Debug, Clone, Serialize, PartialEq, Eq, ts_rs::TS)]
#[derive(Debug, Clone, Default, Serialize, PartialEq, Eq, ts_rs::TS)]
#[ts(export_to = "Artifact.ts")]
#[serde(rename_all = "camelCase")]
pub struct CodeRef {
@@ -396,7 +396,6 @@ pub enum Artifact {
    Cap(Cap),
    SweepEdge(SweepEdge),
    EdgeCut(EdgeCut),
    #[expect(unused)]
    EdgeCutEdge(EdgeCutEdge),
    Helix(Helix),
}
@@ -550,8 +549,9 @@ impl Artifact {
        }
    }

    #[expect(dead_code)]
    pub(crate) fn code_ref(&self) -> Option<&CodeRef> {
    /// The [`CodeRef`] for the artifact itself. See also
    /// [`Self::face_code_ref`].
    pub fn code_ref(&self) -> Option<&CodeRef> {
        match self {
            Artifact::CompositeSolid(a) => Some(&a.code_ref),
            Artifact::Plane(a) => Some(&a.code_ref),
@@ -570,6 +570,24 @@ impl Artifact {
        }
    }

    /// The [`CodeRef`] referring to the face artifact that it's on, not the
    /// artifact itself.
    pub fn face_code_ref(&self) -> Option<&CodeRef> {
        match self {
            Artifact::CompositeSolid(_)
            | Artifact::Plane(_)
            | Artifact::Path(_)
            | Artifact::Segment(_)
            | Artifact::Solid2d(_)
            | Artifact::StartSketchOnFace(_)
            | Artifact::StartSketchOnPlane(_)
            | Artifact::Sweep(_) => None,
            Artifact::Wall(a) => Some(&a.face_code_ref),
            Artifact::Cap(a) => Some(&a.face_code_ref),
            Artifact::SweepEdge(_) | Artifact::EdgeCut(_) | Artifact::EdgeCutEdge(_) | Artifact::Helix(_) => None,
        }
    }

/// Merge the new artifact into self. If it can't because it's a different
|
||||
/// type, return the new artifact which should be used as a replacement.
|
||||
fn merge(&mut self, new: Artifact) -> Option<Artifact> {
|
||||
@ -704,6 +722,19 @@ impl ArtifactGraph {
|
||||
self.map.len()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.map.is_empty()
|
||||
}
|
||||
|
||||
pub fn values(&self) -> impl Iterator<Item = &Artifact> {
|
||||
self.map.values()
|
||||
}
|
||||
|
||||
/// Consume the artifact graph and return the map of artifacts.
|
||||
fn into_map(self) -> IndexMap<ArtifactId, Artifact> {
|
||||
self.map
|
||||
}
|
||||
|
||||
/// Used to make the mermaid tests deterministic.
|
||||
#[cfg(test)]
|
||||
pub(crate) fn sort(&mut self) {
|
||||
@ -712,17 +743,30 @@ impl ArtifactGraph {
|
||||
}
|
||||
}
|
||||
|
||||
/// Build the artifact graph from the artifact commands and the responses. The
|
||||
/// initial graph is the graph cached from a previous execution. NodePaths of
|
||||
/// `exec_artifacts` are filled in from the AST.
|
||||
pub(super) fn build_artifact_graph(
|
||||
artifact_commands: &[ArtifactCommand],
|
||||
responses: &IndexMap<Uuid, WebSocketResponse>,
|
||||
ast: &Node<Program>,
|
||||
exec_artifacts: &IndexMap<ArtifactId, Artifact>,
|
||||
cached_body_items: usize,
|
||||
exec_artifacts: &mut IndexMap<ArtifactId, Artifact>,
|
||||
initial_graph: ArtifactGraph,
|
||||
) -> Result<ArtifactGraph, KclError> {
|
||||
let mut map = IndexMap::new();
|
||||
let mut map = initial_graph.into_map();
|
||||
|
||||
let mut path_to_plane_id_map = FnvHashMap::default();
|
||||
let mut current_plane_id = None;
|
||||
|
||||
// Fill in NodePaths for artifacts that were added directly to the map
|
||||
// during execution.
|
||||
for exec_artifact in exec_artifacts.values_mut() {
|
||||
// Note: We only have access to the new AST. So if these artifacts
|
||||
// somehow came from cached AST, this won't fill in anything.
|
||||
fill_in_node_paths(exec_artifact, ast, cached_body_items);
|
||||
}
|
||||
|
||||
for artifact_command in artifact_commands {
|
||||
if let ModelingCmd::EnableSketchMode(EnableSketchMode { entity_id, .. }) = artifact_command.command {
|
||||
current_plane_id = Some(entity_id);
|
||||
@ -747,6 +791,7 @@ pub(super) fn build_artifact_graph(
|
||||
&flattened_responses,
|
||||
&path_to_plane_id_map,
|
||||
ast,
|
||||
cached_body_items,
|
||||
exec_artifacts,
|
||||
)?;
|
||||
for artifact in artifact_updates {
|
||||
@ -762,6 +807,26 @@ pub(super) fn build_artifact_graph(
|
||||
Ok(ArtifactGraph { map })
|
||||
}
|
||||
|
||||
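To make the new initial_graph flow concrete, here is a minimal, self-contained sketch (std types only; the names are invented for illustration and are not the crate's API) of the incremental rebuild: the cached graph seeds the map, and artifacts from newly executed commands are layered on top.

use std::collections::HashMap;

// Stand-ins for ArtifactId/Artifact, purely illustrative.
type Id = u64;
type Art = &'static str;

fn rebuild(initial: HashMap<Id, Art>, new_items: &[(Id, Art)]) -> HashMap<Id, Art> {
    // Seed from the previous run instead of starting from an empty map.
    let mut map = initial;
    for (id, art) in new_items {
        // A later update for the same id replaces (the real code merges) the
        // cached entry; brand-new ids are simply inserted.
        map.insert(*id, *art);
    }
    map
}

fn main() {
    let cached = HashMap::from([(1, "plane from previous run")]);
    let rebuilt = rebuild(cached, &[(2, "newly added offset plane")]);
    assert_eq!(rebuilt.len(), 2);
}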
/// These may have been created with placeholder `CodeRef`s because we didn't
|
||||
/// have the entire AST available. Now we fill them in.
|
||||
fn fill_in_node_paths(artifact: &mut Artifact, program: &Node<Program>, cached_body_items: usize) {
|
||||
match artifact {
|
||||
Artifact::StartSketchOnFace(face) => {
|
||||
if face.code_ref.node_path.is_empty() {
|
||||
face.code_ref.node_path =
|
||||
NodePath::from_range(program, cached_body_items, face.code_ref.range).unwrap_or_default();
|
||||
}
|
||||
}
|
||||
Artifact::StartSketchOnPlane(plane) => {
|
||||
if plane.code_ref.node_path.is_empty() {
|
||||
plane.code_ref.node_path =
|
||||
NodePath::from_range(program, cached_body_items, plane.code_ref.range).unwrap_or_default();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
/// Flatten the responses into a map of command IDs to modeling command
|
||||
/// responses. The raw responses from the engine contain batches.
|
||||
fn flatten_modeling_command_responses(
|
||||
@ -844,28 +909,28 @@ fn artifacts_to_update(
|
||||
responses: &FnvHashMap<Uuid, OkModelingCmdResponse>,
|
||||
path_to_plane_id_map: &FnvHashMap<Uuid, Uuid>,
|
||||
ast: &Node<Program>,
|
||||
cached_body_items: usize,
|
||||
exec_artifacts: &IndexMap<ArtifactId, Artifact>,
|
||||
) -> Result<Vec<Artifact>, KclError> {
|
||||
let uuid = artifact_command.cmd_id;
|
||||
let Some(response) = responses.get(&uuid) else {
|
||||
// Response not found or not successful.
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
// TODO: Build path-to-node from artifact_command source range. Right now,
|
||||
// we're serializing an empty array, and the TS wrapper fills it in with the
|
||||
// correct value based on NodePath.
|
||||
let path_to_node = Vec::new();
|
||||
let range = artifact_command.range;
|
||||
let node_path = NodePath::from_range(ast, range).unwrap_or_default();
|
||||
let node_path = NodePath::from_range(ast, cached_body_items, range).unwrap_or_default();
|
||||
let code_ref = CodeRef {
|
||||
range,
|
||||
node_path,
|
||||
path_to_node,
|
||||
};
|
||||
|
||||
let uuid = artifact_command.cmd_id;
|
||||
let id = ArtifactId::new(uuid);
|
||||
|
||||
let Some(response) = responses.get(&uuid) else {
|
||||
// Response not found or not successful.
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
|
||||
let cmd = &artifact_command.command;
|
||||
|
||||
match cmd {
|
||||
@ -1100,16 +1165,19 @@ fn artifacts_to_update(
|
||||
let extra_artifact = exec_artifacts.values().find(|a| {
|
||||
if let Artifact::StartSketchOnFace(s) = a {
|
||||
s.face_id == face_id
|
||||
} else if let Artifact::StartSketchOnPlane(s) = a {
|
||||
s.plane_id == face_id
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
let sketch_on_face_source_range = extra_artifact
|
||||
let sketch_on_face_code_ref = extra_artifact
|
||||
.and_then(|a| match a {
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.range),
|
||||
// TODO: If we didn't find it, it's probably a bug.
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.clone()),
|
||||
Artifact::StartSketchOnPlane(s) => Some(s.code_ref.clone()),
|
||||
_ => None,
|
||||
})
|
||||
// TODO: If we didn't find it, it's probably a bug.
|
||||
.unwrap_or_default();
|
||||
|
||||
return_arr.push(Artifact::Wall(Wall {
|
||||
@ -1118,11 +1186,7 @@ fn artifacts_to_update(
|
||||
edge_cut_edge_ids: Vec::new(),
|
||||
sweep_id: path_sweep_id,
|
||||
path_ids: Vec::new(),
|
||||
face_code_ref: CodeRef {
|
||||
range: sketch_on_face_source_range,
|
||||
node_path: NodePath::from_range(ast, sketch_on_face_source_range).unwrap_or_default(),
|
||||
path_to_node: Vec::new(),
|
||||
},
|
||||
face_code_ref: sketch_on_face_code_ref,
|
||||
cmd_id: artifact_command.cmd_id,
|
||||
}));
|
||||
let mut new_seg = seg.clone();
|
||||
@ -1155,15 +1219,19 @@ fn artifacts_to_update(
|
||||
let extra_artifact = exec_artifacts.values().find(|a| {
|
||||
if let Artifact::StartSketchOnFace(s) = a {
|
||||
s.face_id == face_id
|
||||
} else if let Artifact::StartSketchOnPlane(s) = a {
|
||||
s.plane_id == face_id
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
let sketch_on_face_source_range = extra_artifact
|
||||
let sketch_on_face_code_ref = extra_artifact
|
||||
.and_then(|a| match a {
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.range),
|
||||
Artifact::StartSketchOnFace(s) => Some(s.code_ref.clone()),
|
||||
Artifact::StartSketchOnPlane(s) => Some(s.code_ref.clone()),
|
||||
_ => None,
|
||||
})
|
||||
// TODO: If we didn't find it, it's probably a bug.
|
||||
.unwrap_or_default();
|
||||
return_arr.push(Artifact::Cap(Cap {
|
||||
id: face_id,
|
||||
@ -1171,11 +1239,7 @@ fn artifacts_to_update(
|
||||
edge_cut_edge_ids: Vec::new(),
|
||||
sweep_id: path_sweep_id,
|
||||
path_ids: Vec::new(),
|
||||
face_code_ref: CodeRef {
|
||||
range: sketch_on_face_source_range,
|
||||
node_path: NodePath::from_range(ast, sketch_on_face_source_range).unwrap_or_default(),
|
||||
path_to_node: Vec::new(),
|
||||
},
|
||||
face_code_ref: sketch_on_face_code_ref,
|
||||
cmd_id: artifact_command.cmd_id,
|
||||
}));
|
||||
let Some(Artifact::Sweep(sweep)) = artifacts.get(&path_sweep_id) else {
|
||||
|
@ -298,13 +298,19 @@ impl ArtifactGraph {
|
||||
let range = code_ref.range;
|
||||
[range.start(), range.end(), range.module_id().as_usize()]
|
||||
}
|
||||
fn node_path_display<W: Write>(output: &mut W, prefix: &str, code_ref: &CodeRef) -> std::fmt::Result {
|
||||
fn node_path_display<W: Write>(
|
||||
output: &mut W,
|
||||
prefix: &str,
|
||||
label: Option<&str>,
|
||||
code_ref: &CodeRef,
|
||||
) -> std::fmt::Result {
|
||||
// %% is a mermaid comment. Prefix is increased one level since it's
|
||||
// a child of the line above it.
|
||||
let label = label.unwrap_or("");
|
||||
if code_ref.node_path.is_empty() {
|
||||
return writeln!(output, "{prefix} %% Missing NodePath");
|
||||
return writeln!(output, "{prefix} %% {label}Missing NodePath");
|
||||
}
|
||||
writeln!(output, "{prefix} %% {:?}", code_ref.node_path.steps)
|
||||
writeln!(output, "{prefix} %% {label}{:?}", code_ref.node_path.steps)
|
||||
}
|
||||
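// Examples of the lines this helper now emits with the new label parameter;
// the forms are taken from the updated mermaid snapshots later in this diff:
//   {prefix} %% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, ...]
//   {prefix} %% face_code_ref=Missing NodePath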
|
||||
match artifact {
|
||||
@ -315,7 +321,7 @@ impl ArtifactGraph {
|
||||
composite_solid.sub_type,
|
||||
code_ref_display(&composite_solid.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &composite_solid.code_ref)?;
|
||||
node_path_display(output, prefix, None, &composite_solid.code_ref)?;
|
||||
}
|
||||
Artifact::Plane(plane) => {
|
||||
writeln!(
|
||||
@ -323,7 +329,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Plane<br>{:?}\"]",
|
||||
code_ref_display(&plane.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &plane.code_ref)?;
|
||||
node_path_display(output, prefix, None, &plane.code_ref)?;
|
||||
}
|
||||
Artifact::Path(path) => {
|
||||
writeln!(
|
||||
@ -331,7 +337,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Path<br>{:?}\"]",
|
||||
code_ref_display(&path.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &path.code_ref)?;
|
||||
node_path_display(output, prefix, None, &path.code_ref)?;
|
||||
}
|
||||
Artifact::Segment(segment) => {
|
||||
writeln!(
|
||||
@ -339,7 +345,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Segment<br>{:?}\"]",
|
||||
code_ref_display(&segment.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &segment.code_ref)?;
|
||||
node_path_display(output, prefix, None, &segment.code_ref)?;
|
||||
}
|
||||
Artifact::Solid2d(_solid2d) => {
|
||||
writeln!(output, "{prefix}{}[Solid2d]", id)?;
|
||||
@ -350,7 +356,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"StartSketchOnFace<br>{:?}\"]",
|
||||
code_ref_display(code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, code_ref)?;
|
||||
node_path_display(output, prefix, None, code_ref)?;
|
||||
}
|
||||
Artifact::StartSketchOnPlane(StartSketchOnPlane { code_ref, .. }) => {
|
||||
writeln!(
|
||||
@ -358,7 +364,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"StartSketchOnPlane<br>{:?}\"]",
|
||||
code_ref_display(code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, code_ref)?;
|
||||
node_path_display(output, prefix, None, code_ref)?;
|
||||
}
|
||||
Artifact::Sweep(sweep) => {
|
||||
writeln!(
|
||||
@ -367,13 +373,15 @@ impl ArtifactGraph {
|
||||
sweep.sub_type,
|
||||
code_ref_display(&sweep.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &sweep.code_ref)?;
|
||||
node_path_display(output, prefix, None, &sweep.code_ref)?;
|
||||
}
|
||||
Artifact::Wall(_wall) => {
|
||||
Artifact::Wall(wall) => {
|
||||
writeln!(output, "{prefix}{id}[Wall]")?;
|
||||
node_path_display(output, prefix, Some("face_code_ref="), &wall.face_code_ref)?;
|
||||
}
|
||||
Artifact::Cap(cap) => {
|
||||
writeln!(output, "{prefix}{id}[\"Cap {:?}\"]", cap.sub_type)?;
|
||||
node_path_display(output, prefix, Some("face_code_ref="), &cap.face_code_ref)?;
|
||||
}
|
||||
Artifact::SweepEdge(sweep_edge) => {
|
||||
writeln!(output, "{prefix}{id}[\"SweepEdge {:?}\"]", sweep_edge.sub_type)?;
|
||||
@ -385,7 +393,7 @@ impl ArtifactGraph {
|
||||
edge_cut.sub_type,
|
||||
code_ref_display(&edge_cut.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &edge_cut.code_ref)?;
|
||||
node_path_display(output, prefix, None, &edge_cut.code_ref)?;
|
||||
}
|
||||
Artifact::EdgeCutEdge(_edge_cut_edge) => {
|
||||
writeln!(output, "{prefix}{id}[EdgeCutEdge]")?;
|
||||
@ -396,7 +404,7 @@ impl ArtifactGraph {
|
||||
"{prefix}{id}[\"Helix<br>{:?}\"]",
|
||||
code_ref_display(&helix.code_ref)
|
||||
)?;
|
||||
node_path_display(output, prefix, &helix.code_ref)?;
|
||||
node_path_display(output, prefix, None, &helix.code_ref)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -79,6 +79,9 @@ pub(super) enum CacheResult {
|
||||
reapply_settings: bool,
|
||||
/// The program that needs to be executed.
|
||||
program: Node<Program>,
|
||||
/// The number of body items that were cached and omitted from the
|
||||
/// program that needs to be executed. Used to compute [`crate::NodePath`].
|
||||
cached_body_items: usize,
|
||||
},
|
||||
/// Check only the imports, and not the main program.
|
||||
/// Before sending this we already checked the main program and it is the same.
|
||||
@ -191,6 +194,7 @@ pub(super) async fn get_changed_program(old: CacheInformation<'_>, new: CacheInf
|
||||
clear_scene: true,
|
||||
reapply_settings: true,
|
||||
program: new.ast.clone(),
|
||||
cached_body_items: 0,
|
||||
};
|
||||
}
|
||||
|
||||
@ -219,6 +223,7 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
|
||||
clear_scene: true,
|
||||
reapply_settings,
|
||||
program: new_ast,
|
||||
cached_body_items: 0,
|
||||
};
|
||||
}
|
||||
|
||||
@ -239,6 +244,7 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
|
||||
clear_scene: true,
|
||||
reapply_settings,
|
||||
program: new_ast,
|
||||
cached_body_items: 0,
|
||||
}
|
||||
}
|
||||
std::cmp::Ordering::Greater => {
|
||||
@ -255,6 +261,7 @@ fn generate_changed_program(old_ast: Node<Program>, mut new_ast: Node<Program>,
|
||||
clear_scene: false,
|
||||
reapply_settings,
|
||||
program: new_ast,
|
||||
cached_body_items: old_ast.body.len(),
|
||||
}
|
||||
}
|
||||
std::cmp::Ordering::Equal => {
|
||||
@ -592,7 +599,8 @@ startSketchOn(XY)
|
||||
CacheResult::ReExecute {
|
||||
clear_scene: true,
|
||||
reapply_settings: true,
|
||||
program: new_program.ast
|
||||
program: new_program.ast,
|
||||
cached_body_items: 0,
|
||||
}
|
||||
);
|
||||
}
|
||||
@ -630,7 +638,8 @@ startSketchOn(XY)
|
||||
CacheResult::ReExecute {
|
||||
clear_scene: true,
|
||||
reapply_settings: true,
|
||||
program: new_program.ast
|
||||
program: new_program.ast,
|
||||
cached_body_items: 0,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
@ -736,21 +736,35 @@ fn apply_ascription(
|
||||
let ty = RuntimeType::from_parsed(ty.inner.clone(), exec_state, value.into())
|
||||
.map_err(|e| KclError::Semantic(e.into()))?;
|
||||
|
||||
if let KclValue::Number { value, meta, .. } = value {
|
||||
let mut value = value.clone();
|
||||
|
||||
// If the number has unknown units but the user is explicitly specifying them, treat the value as having had its units erased,
|
||||
// rather than forcing the user to explicitly erase them.
|
||||
KclValue::Number {
|
||||
if let KclValue::Number { value: n, meta, .. } = &value {
|
||||
if let RuntimeType::Primitive(PrimitiveType::Number(num)) = &ty {
|
||||
if num.is_fully_specified() {
|
||||
value = KclValue::Number {
|
||||
ty: NumericType::Any,
|
||||
value: *value,
|
||||
value: *n,
|
||||
meta: meta.clone(),
|
||||
};
|
||||
}
|
||||
.coerce(&ty, exec_state)
|
||||
}
|
||||
}
|
||||
|
||||
value.coerce(&ty, exec_state).map_err(|_| {
|
||||
let suggestion = if ty == RuntimeType::length() {
|
||||
", you might try coercing to a fully specified numeric type such as `number(mm)`"
|
||||
} else if ty == RuntimeType::angle() {
|
||||
", you might try coercing to a fully specified numeric type such as `number(deg)`"
|
||||
} else {
|
||||
value.coerce(&ty, exec_state)
|
||||
}
|
||||
.map_err(|_| {
|
||||
""
|
||||
};
|
||||
KclError::Semantic(KclErrorDetails {
|
||||
message: format!("could not coerce {} value to type {}", value.human_friendly_type(), ty),
|
||||
message: format!(
|
||||
"could not coerce {} value to type {ty}{suggestion}",
|
||||
value.human_friendly_type()
|
||||
),
|
||||
source_ranges: vec![source_range],
|
||||
})
|
||||
})
|
||||
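As a hedged illustration of the new message (the exact human-friendly type text comes from human_friendly_type(), so the wording may differ slightly), ascribing number(Length) to a value whose units are unknown would now read roughly:

could not coerce a number value to type Length, you might try coercing to a fully specified numeric type such as `number(mm)`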
@ -1453,7 +1467,6 @@ impl Node<CallExpressionKw> {
|
||||
.await
|
||||
.map_err(|e| {
|
||||
// Add the call expression to the source ranges.
|
||||
// TODO currently ignored by the frontend
|
||||
e.add_source_ranges(vec![callsite])
|
||||
})?;
|
||||
|
||||
@ -2767,4 +2780,29 @@ startSketchOn(XY)
|
||||
// Make sure we get a useful error message and not an engine error.
|
||||
assert!(e.message().contains("sqrt"), "Error message: '{}'", e.message());
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn coerce_unknown_to_length() {
|
||||
let ast = r#"x = 2mm * 2mm
|
||||
y = x: number(Length)"#;
|
||||
let e = parse_execute(ast).await.unwrap_err();
|
||||
assert!(
|
||||
e.message().contains("could not coerce"),
|
||||
"Error message: '{}'",
|
||||
e.message()
|
||||
);
|
||||
|
||||
let ast = r#"x = 2mm
|
||||
y = x: number(Length)"#;
|
||||
let result = parse_execute(ast).await.unwrap();
|
||||
let mem = result.exec_state.stack();
|
||||
let num = mem
|
||||
.memory
|
||||
.get_from("y", result.mem_env, SourceRange::default(), 0)
|
||||
.unwrap()
|
||||
.as_ty_f64()
|
||||
.unwrap();
|
||||
assert_eq!(num.n, 2.0);
|
||||
assert_eq!(num.ty, NumericType::mm());
|
||||
}
|
||||
}
|
||||
|
@ -571,7 +571,7 @@ impl ExecutorContext {
|
||||
// part of the scene).
|
||||
exec_state.mut_stack().push_new_env_for_scope();
|
||||
|
||||
let result = self.inner_run(&program, &mut exec_state, true).await?;
|
||||
let result = self.inner_run(&program, 0, &mut exec_state, true).await?;
|
||||
|
||||
// Restore any temporary variables, then save any newly created variables back to
|
||||
// memory in case another run wants to use them. Note this is just saved to the preserved
|
||||
@ -590,12 +590,13 @@ impl ExecutorContext {
|
||||
pub async fn run_with_caching(&self, program: crate::Program) -> Result<ExecOutcome, KclErrorWithOutputs> {
|
||||
assert!(!self.is_mock());
|
||||
|
||||
let (program, mut exec_state, preserve_mem, imports_info) = if let Some(OldAstState {
|
||||
let (program, mut exec_state, preserve_mem, cached_body_items, imports_info) = if let Some(OldAstState {
|
||||
ast: old_ast,
|
||||
exec_state: mut old_state,
|
||||
settings: old_settings,
|
||||
result_env,
|
||||
}) = cache::read_old_ast().await
|
||||
}) =
|
||||
cache::read_old_ast().await
|
||||
{
|
||||
let old = CacheInformation {
|
||||
ast: &old_ast,
|
||||
@ -607,11 +608,13 @@ impl ExecutorContext {
|
||||
};
|
||||
|
||||
// Get the program that actually changed from the old and new information.
|
||||
let (clear_scene, program, import_check_info) = match cache::get_changed_program(old, new).await {
|
||||
let (clear_scene, program, body_items, import_check_info) = match cache::get_changed_program(old, new).await
|
||||
{
|
||||
CacheResult::ReExecute {
|
||||
clear_scene,
|
||||
reapply_settings,
|
||||
program: changed_program,
|
||||
cached_body_items,
|
||||
} => {
|
||||
if reapply_settings
|
||||
&& self
|
||||
@ -620,7 +623,7 @@ impl ExecutorContext {
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
(true, program, None)
|
||||
(true, program, cached_body_items, None)
|
||||
} else {
|
||||
(
|
||||
clear_scene,
|
||||
@ -628,6 +631,7 @@ impl ExecutorContext {
|
||||
ast: changed_program,
|
||||
original_file_contents: program.original_file_contents,
|
||||
},
|
||||
cached_body_items,
|
||||
None,
|
||||
)
|
||||
}
|
||||
@ -643,7 +647,7 @@ impl ExecutorContext {
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
(true, program, None)
|
||||
(true, program, old_ast.body.len(), None)
|
||||
} else {
|
||||
// We need to check our imports to see if they changed.
|
||||
let mut new_exec_state = ExecState::new(self);
|
||||
@ -676,6 +680,7 @@ impl ExecutorContext {
|
||||
ast: changed_program,
|
||||
original_file_contents: program.original_file_contents,
|
||||
},
|
||||
old_ast.body.len(),
|
||||
// We only care about this if we are clearing the scene.
|
||||
if clear_scene {
|
||||
Some((new_universe, new_universe_map, new_exec_state))
|
||||
@ -704,7 +709,7 @@ impl ExecutorContext {
|
||||
let outcome = old_state.to_exec_outcome(result_env).await;
|
||||
return Ok(outcome);
|
||||
}
|
||||
(true, program, None)
|
||||
(true, program, old_ast.body.len(), None)
|
||||
}
|
||||
CacheResult::NoAction(false) => {
|
||||
let outcome = old_state.to_exec_outcome(result_env).await;
|
||||
@ -736,17 +741,17 @@ impl ExecutorContext {
|
||||
(old_state, true, None)
|
||||
};
|
||||
|
||||
(program, exec_state, preserve_mem, universe_info)
|
||||
(program, exec_state, preserve_mem, body_items, universe_info)
|
||||
} else {
|
||||
let mut exec_state = ExecState::new(self);
|
||||
self.send_clear_scene(&mut exec_state, Default::default())
|
||||
.await
|
||||
.map_err(KclErrorWithOutputs::no_outputs)?;
|
||||
(program, exec_state, false, None)
|
||||
(program, exec_state, false, 0, None)
|
||||
};
|
||||
|
||||
let result = self
|
||||
.run_concurrent(&program, &mut exec_state, imports_info, preserve_mem)
|
||||
.run_concurrent(&program, cached_body_items, &mut exec_state, imports_info, preserve_mem)
|
||||
.await;
|
||||
|
||||
if result.is_err() {
|
||||
@ -780,7 +785,7 @@ impl ExecutorContext {
|
||||
program: &crate::Program,
|
||||
exec_state: &mut ExecState,
|
||||
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
|
||||
self.run_concurrent(program, exec_state, None, false).await
|
||||
self.run_concurrent(program, 0, exec_state, None, false).await
|
||||
}
|
||||
|
||||
/// Perform the execution of a program using a concurrent
|
||||
@ -793,6 +798,7 @@ impl ExecutorContext {
|
||||
pub async fn run_concurrent(
|
||||
&self,
|
||||
program: &crate::Program,
|
||||
cached_body_items: usize,
|
||||
exec_state: &mut ExecState,
|
||||
universe_info: Option<(Universe, UniverseMap)>,
|
||||
preserve_mem: bool,
|
||||
@ -823,6 +829,7 @@ impl ExecutorContext {
|
||||
|
||||
KclErrorWithOutputs::new(
|
||||
err,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -999,6 +1006,7 @@ impl ExecutorContext {
|
||||
|
||||
return Err(KclErrorWithOutputs::new(
|
||||
e,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -1014,7 +1022,8 @@ impl ExecutorContext {
|
||||
}
|
||||
}
|
||||
|
||||
self.inner_run(program, exec_state, preserve_mem).await
|
||||
self.inner_run(program, cached_body_items, exec_state, preserve_mem)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Get the universe & universe map of the program.
|
||||
@ -1048,6 +1057,7 @@ impl ExecutorContext {
|
||||
|
||||
KclErrorWithOutputs::new(
|
||||
err,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -1068,6 +1078,7 @@ impl ExecutorContext {
|
||||
async fn inner_run(
|
||||
&self,
|
||||
program: &crate::Program,
|
||||
cached_body_items: usize,
|
||||
exec_state: &mut ExecState,
|
||||
preserve_mem: bool,
|
||||
) -> Result<(EnvironmentRef, Option<ModelingSessionData>), KclErrorWithOutputs> {
|
||||
@ -1081,7 +1092,7 @@ impl ExecutorContext {
|
||||
|
||||
let default_planes = self.engine.get_default_planes().read().await.clone();
|
||||
let result = self
|
||||
.execute_and_build_graph(&program.ast, exec_state, preserve_mem)
|
||||
.execute_and_build_graph(&program.ast, cached_body_items, exec_state, preserve_mem)
|
||||
.await;
|
||||
|
||||
crate::log::log(format!(
|
||||
@ -1100,6 +1111,7 @@ impl ExecutorContext {
|
||||
|
||||
KclErrorWithOutputs::new(
|
||||
e,
|
||||
exec_state.errors().to_vec(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
exec_state.global.operations.clone(),
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
@ -1127,6 +1139,7 @@ impl ExecutorContext {
|
||||
async fn execute_and_build_graph(
|
||||
&self,
|
||||
program: NodeRef<'_, crate::parsing::ast::types::Program>,
|
||||
#[cfg_attr(not(feature = "artifact-graph"), expect(unused))] cached_body_items: usize,
|
||||
exec_state: &mut ExecState,
|
||||
preserve_mem: bool,
|
||||
) -> Result<EnvironmentRef, KclError> {
|
||||
@ -1155,23 +1168,25 @@ impl ExecutorContext {
|
||||
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
{
|
||||
// Move the artifact commands and responses to simplify cache management
|
||||
// and error creation.
|
||||
exec_state
|
||||
.global
|
||||
.artifact_commands
|
||||
.extend(self.engine.take_artifact_commands().await);
|
||||
exec_state
|
||||
.global
|
||||
.artifact_responses
|
||||
.extend(self.engine.take_responses().await);
|
||||
let new_commands = self.engine.take_artifact_commands().await;
|
||||
let new_responses = self.engine.take_responses().await;
|
||||
let initial_graph = exec_state.global.artifact_graph.clone();
|
||||
|
||||
// Build the artifact graph.
|
||||
match build_artifact_graph(
|
||||
&exec_state.global.artifact_commands,
|
||||
&exec_state.global.artifact_responses,
|
||||
let graph_result = build_artifact_graph(
|
||||
&new_commands,
|
||||
&new_responses,
|
||||
program,
|
||||
&exec_state.global.artifacts,
|
||||
) {
|
||||
cached_body_items,
|
||||
&mut exec_state.global.artifacts,
|
||||
initial_graph,
|
||||
);
|
||||
// Move the artifact commands and responses into ExecState to
|
||||
// simplify cache management and error creation.
|
||||
exec_state.global.artifact_commands.extend(new_commands);
|
||||
exec_state.global.artifact_responses.extend(new_responses);
|
||||
|
||||
match graph_result {
|
||||
Ok(artifact_graph) => {
|
||||
exec_state.global.artifact_graph = artifact_graph;
|
||||
exec_result.map(|(_, env_ref, _)| env_ref)
|
||||
|
@ -664,6 +664,17 @@ impl NumericType {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_fully_specified(&self) -> bool {
|
||||
!matches!(
|
||||
self,
|
||||
NumericType::Unknown
|
||||
| NumericType::Known(UnitType::Angle(UnitAngle::Unknown))
|
||||
| NumericType::Known(UnitType::Length(UnitLen::Unknown))
|
||||
| NumericType::Any
|
||||
| NumericType::Default { .. }
|
||||
)
|
||||
}
|
||||
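// Rough illustration of the predicate above (hedged; only the variants listed
// in the match are assumed):
//   Known(UnitType::Length(UnitLen::Mm))       -> true  (a concrete unit)
//   Known(UnitType::Length(UnitLen::Unknown))  -> false (units were erased)
//   Any, Unknown, Default { .. }               -> false (nothing fully pinned down)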
|
||||
fn example_ty(&self) -> Option<String> {
|
||||
match self {
|
||||
Self::Known(t) if !self.is_unknown() => Some(t.to_string()),
|
||||
@ -1266,7 +1277,15 @@ impl KclValue {
|
||||
.satisfied(values.len(), allow_shrink)
|
||||
.ok_or(CoercionError::from(self))?;
|
||||
|
||||
assert!(len <= values.len());
|
||||
if len > values.len() {
|
||||
let message = format!(
|
||||
"Internal: Expected coerced array length {len} to be less than or equal to original length {}",
|
||||
values.len()
|
||||
);
|
||||
exec_state.err(CompilationError::err(self.into(), message.clone()));
|
||||
#[cfg(debug_assertions)]
|
||||
panic!("{message}");
|
||||
}
|
||||
values.truncate(len);
|
||||
|
||||
Ok(KclValue::HomArray {
|
||||
|
@ -99,7 +99,7 @@ pub use lsp::{
|
||||
kcl::{Backend as KclLspBackend, Server as KclLspServerSubCommand},
|
||||
};
|
||||
pub use modules::ModuleId;
|
||||
pub use parsing::ast::types::{FormatOptions, NodePath};
|
||||
pub use parsing::ast::types::{FormatOptions, NodePath, Step as NodePathStep};
|
||||
pub use settings::types::{project::ProjectConfiguration, Configuration, UnitLength};
|
||||
pub use source_range::SourceRange;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@ -250,8 +250,8 @@ impl Program {
|
||||
self.ast.lint(rule)
|
||||
}
|
||||
|
||||
pub fn node_path_from_range(&self, range: SourceRange) -> Option<NodePath> {
|
||||
NodePath::from_range(&self.ast, range)
|
||||
pub fn node_path_from_range(&self, cached_body_items: usize, range: SourceRange) -> Option<NodePath> {
|
||||
NodePath::from_range(&self.ast, cached_body_items, range)
|
||||
}
|
||||
|
||||
pub fn recast(&self) -> String {
|
||||
|
@ -11,7 +11,7 @@ use std::{
|
||||
|
||||
use anyhow::Result;
|
||||
use parse_display::{Display, FromStr};
|
||||
pub use path::NodePath;
|
||||
pub use path::{NodePath, Step};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower_lsp::lsp_types::{
|
||||
|
@ -62,14 +62,21 @@ pub enum Step {
|
||||
impl NodePath {
|
||||
/// Given a program and a [`SourceRange`], return the path to the node that
|
||||
/// contains the range.
|
||||
pub(crate) fn from_range(program: &Node<Program>, range: SourceRange) -> Option<Self> {
|
||||
Self::from_body(&program.body, range, NodePath::default())
|
||||
pub(crate) fn from_range(program: &Node<Program>, cached_body_items: usize, range: SourceRange) -> Option<Self> {
|
||||
Self::from_body(&program.body, cached_body_items, range, NodePath::default())
|
||||
}
|
||||
|
||||
fn from_body(body: &[BodyItem], range: SourceRange, mut path: NodePath) -> Option<NodePath> {
|
||||
fn from_body(
|
||||
body: &[BodyItem],
|
||||
cached_body_items: usize,
|
||||
range: SourceRange,
|
||||
mut path: NodePath,
|
||||
) -> Option<NodePath> {
|
||||
for (i, item) in body.iter().enumerate() {
|
||||
if item.contains_range(&range) {
|
||||
path.push(Step::ProgramBodyItem { index: i });
|
||||
path.push(Step::ProgramBodyItem {
|
||||
index: cached_body_items + i,
|
||||
});
|
||||
return Self::from_body_item(item, range, path);
|
||||
}
|
||||
}
|
||||
@ -262,7 +269,7 @@ impl NodePath {
|
||||
}
|
||||
if node.then_val.contains_range(&range) {
|
||||
path.push(Step::IfExpressionThen);
|
||||
return Self::from_body(&node.then_val.body, range, path);
|
||||
return Self::from_body(&node.then_val.body, 0, range, path);
|
||||
}
|
||||
for else_if in &node.else_ifs {
|
||||
if else_if.contains_range(&range) {
|
||||
@ -273,14 +280,14 @@ impl NodePath {
|
||||
}
|
||||
if else_if.then_val.contains_range(&range) {
|
||||
path.push(Step::IfExpressionElseIfBody);
|
||||
return Self::from_body(&else_if.then_val.body, range, path);
|
||||
return Self::from_body(&else_if.then_val.body, 0, range, path);
|
||||
}
|
||||
return Some(path);
|
||||
}
|
||||
}
|
||||
if node.final_else.contains_range(&range) {
|
||||
path.push(Step::IfExpressionElse);
|
||||
return Self::from_body(&node.final_else.body, range, path);
|
||||
return Self::from_body(&node.final_else.body, 0, range, path);
|
||||
}
|
||||
}
|
||||
Expr::LabelledExpression(node) => {
|
||||
@ -345,7 +352,7 @@ mod tests {
|
||||
// fn cube(sideLength, center) {
|
||||
// ^^^^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(38, 42)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(38, 42)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![Step::ProgramBodyItem { index: 0 }, Step::VariableDeclarationDeclaration],
|
||||
}
|
||||
@ -353,7 +360,7 @@ mod tests {
|
||||
// fn cube(sideLength, center) {
|
||||
// ^^^^^^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(55, 61)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(55, 61)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![
|
||||
Step::ProgramBodyItem { index: 0 },
|
||||
@ -366,7 +373,7 @@ mod tests {
|
||||
// |> line(endAbsolute = p1)
|
||||
// ^^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(293, 295)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(293, 295)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![
|
||||
Step::ProgramBodyItem { index: 0 },
|
||||
@ -383,7 +390,7 @@ mod tests {
|
||||
// myCube = cube(sideLength = 40, center = [0, 0])
|
||||
// ^
|
||||
assert_eq!(
|
||||
NodePath::from_range(&program.ast, range(485, 486)).unwrap(),
|
||||
NodePath::from_range(&program.ast, 0, range(485, 486)).unwrap(),
|
||||
NodePath {
|
||||
steps: vec![
|
||||
Step::ProgramBodyItem { index: 1 },
|
||||
|
@ -21,7 +21,7 @@ struct Test {
|
||||
name: String,
|
||||
/// The name of the KCL file that's the entry point, e.g. "main.kcl", in the
|
||||
/// `input_dir`.
|
||||
entry_point: String,
|
||||
entry_point: PathBuf,
|
||||
/// Input KCL files are in this directory.
|
||||
input_dir: PathBuf,
|
||||
/// Expected snapshot output files are in this directory.
|
||||
@ -34,11 +34,16 @@ impl Test {
|
||||
fn new(name: &str) -> Self {
|
||||
Self {
|
||||
name: name.to_owned(),
|
||||
entry_point: "input.kcl".to_owned(),
|
||||
entry_point: Path::new("tests").join(name).join("input.kcl"),
|
||||
input_dir: Path::new("tests").join(name),
|
||||
output_dir: Path::new("tests").join(name),
|
||||
}
|
||||
}
|
||||
|
||||
/// Read in the entry point file and return its contents as a string.
|
||||
pub fn read(&self) -> String {
|
||||
std::fs::read_to_string(&self.entry_point).expect("Failed to read file: {filename}")
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_snapshot<F, R>(test: &Test, operation: &str, f: F)
|
||||
@ -66,19 +71,12 @@ where
|
||||
settings.bind(f);
|
||||
}
|
||||
|
||||
fn read<P>(filename: &str, dir: P) -> String
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
std::fs::read_to_string(dir.as_ref().join(filename)).expect("Failed to read file: {filename}")
|
||||
}
|
||||
|
||||
fn parse(test_name: &str) {
|
||||
parse_test(&Test::new(test_name));
|
||||
}
|
||||
|
||||
fn parse_test(test: &Test) {
|
||||
let input = read(&test.entry_point, &test.input_dir);
|
||||
let input = test.read();
|
||||
let tokens = crate::parsing::token::lex(&input, ModuleId::default()).unwrap();
|
||||
|
||||
// Parse the tokens into an AST.
|
||||
@ -98,7 +96,7 @@ async fn unparse(test_name: &str) {
|
||||
|
||||
async fn unparse_test(test: &Test) {
|
||||
// Parse into an AST
|
||||
let input = read(&test.entry_point, &test.input_dir);
|
||||
let input = test.read();
|
||||
let tokens = crate::parsing::token::lex(&input, ModuleId::default()).unwrap();
|
||||
let ast = crate::parsing::parse_tokens(tokens).unwrap();
|
||||
|
||||
@ -111,10 +109,9 @@ async fn unparse_test(test: &Test) {
|
||||
}));
|
||||
|
||||
// Check all the rest of the files in the directory.
|
||||
let entry_point = test.input_dir.join(&test.entry_point);
|
||||
let kcl_files = crate::unparser::walk_dir(&test.input_dir).await.unwrap();
|
||||
// Filter out the entry point file.
|
||||
let kcl_files = kcl_files.into_iter().filter(|f| f != &entry_point);
|
||||
let kcl_files = kcl_files.into_iter().filter(|f| f != &test.entry_point);
|
||||
let futures = kcl_files
|
||||
.into_iter()
|
||||
.filter(|file| file.extension().is_some_and(|ext| ext == "kcl")) // We only care about kcl
|
||||
@ -154,13 +151,11 @@ async fn execute(test_name: &str, render_to_png: bool) {
|
||||
}
|
||||
|
||||
async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
|
||||
let input = read(&test.entry_point, &test.input_dir);
|
||||
let input = test.read();
|
||||
let ast = crate::Program::parse_no_errs(&input).unwrap();
|
||||
|
||||
// Run the program.
|
||||
let exec_res =
|
||||
crate::test_server::execute_and_snapshot_ast(ast, Some(test.input_dir.join(&test.entry_point)), export_step)
|
||||
.await;
|
||||
let exec_res = crate::test_server::execute_and_snapshot_ast(ast, Some(test.entry_point.clone()), export_step).await;
|
||||
match exec_res {
|
||||
Ok((exec_state, env_ref, png, step)) => {
|
||||
let fail_path = test.output_dir.join("execution_error.snap");
|
||||
@ -2770,7 +2765,7 @@ mod clone_w_fillets {
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[ignore] // turn on when https://github.com/KittyCAD/engine/pull/3380 is merged
|
||||
// Theres also a test in clone.rs you need to turn too
|
||||
// There's also a test in clone.rs you need to turn too
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
@ -3091,6 +3086,48 @@ mod error_revolve_on_edge_get_edge {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod subtract_with_pattern {
|
||||
const TEST_NAME: &str = "subtract_with_pattern";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod subtract_with_pattern_cut_thru {
|
||||
const TEST_NAME: &str = "subtract_with_pattern_cut_thru";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod sketch_on_face_union {
|
||||
const TEST_NAME: &str = "sketch_on_face_union";
|
||||
|
||||
@ -3112,3 +3149,67 @@ mod sketch_on_face_union {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod multi_target_csg {
|
||||
const TEST_NAME: &str = "multi_target_csg";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod revolve_colinear {
|
||||
const TEST_NAME: &str = "revolve-colinear";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
#[ignore] // until https://github.com/KittyCAD/engine/pull/3417 lands
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
mod subtract_regression07 {
|
||||
const TEST_NAME: &str = "subtract_regression07";
|
||||
|
||||
/// Test parsing KCL.
|
||||
#[test]
|
||||
fn parse() {
|
||||
super::parse(TEST_NAME)
|
||||
}
|
||||
|
||||
/// Test that parsing and unparsing KCL produces the original KCL input.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn unparse() {
|
||||
super::unparse(TEST_NAME).await
|
||||
}
|
||||
|
||||
/// Test that KCL is executed correctly.
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn kcl_test_execute() {
|
||||
super::execute(TEST_NAME, true).await
|
||||
}
|
||||
}
|
||||
|
@ -23,7 +23,7 @@ lazy_static::lazy_static! {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
fn parse(dir_name: &str, dir_path: &Path) {
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
let t = test(dir_name, dir_path.join("main.kcl"));
|
||||
let write_new = matches!(
|
||||
std::env::var("INSTA_UPDATE").as_deref(),
|
||||
Ok("auto" | "always" | "new" | "unseen")
|
||||
@ -37,7 +37,7 @@ fn parse(dir_name: &str, dir_path: &Path) {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
async fn unparse(dir_name: &str, dir_path: &Path) {
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
let t = test(dir_name, dir_path.join("main.kcl"));
|
||||
unparse_test(&t).await;
|
||||
}
|
||||
|
||||
@ -71,7 +71,7 @@ async fn unparse_test(test: &Test) {
|
||||
|
||||
#[kcl_directory_test_macro::test_all_dirs("../public/kcl-samples")]
|
||||
async fn kcl_test_execute(dir_name: &str, dir_path: &Path) {
|
||||
let t = test(dir_name, dir_path.join("main.kcl").to_str().unwrap().to_owned());
|
||||
let t = test(dir_name, dir_path.join("main.kcl"));
|
||||
super::execute_test(&t, true, true).await;
|
||||
}
|
||||
|
||||
@ -129,12 +129,22 @@ fn test_after_engine_generate_manifest() {
|
||||
generate_kcl_manifest(&INPUTS_DIR).unwrap();
|
||||
}
|
||||
|
||||
fn test(test_name: &str, entry_point: String) -> Test {
|
||||
fn test(test_name: &str, entry_point: std::path::PathBuf) -> Test {
|
||||
let parent = std::fs::canonicalize(entry_point.parent().unwrap()).unwrap();
|
||||
let inputs_dir = std::fs::canonicalize(INPUTS_DIR.as_path()).unwrap();
|
||||
let relative_path = parent.strip_prefix(inputs_dir).unwrap();
|
||||
let output_dir = std::fs::canonicalize(OUTPUTS_DIR.as_path()).unwrap();
|
||||
let relative_output_dir = output_dir.join(relative_path);
|
||||
|
||||
// Ensure the output directory exists.
|
||||
if !relative_output_dir.exists() {
|
||||
std::fs::create_dir_all(&relative_output_dir).unwrap();
|
||||
}
|
||||
Test {
|
||||
name: test_name.to_owned(),
|
||||
entry_point,
|
||||
input_dir: INPUTS_DIR.join(test_name),
|
||||
output_dir: OUTPUTS_DIR.join(test_name),
|
||||
entry_point: entry_point.clone(),
|
||||
input_dir: parent.to_path_buf(),
|
||||
output_dir: relative_output_dir,
|
||||
}
|
||||
}
|
||||
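// Path math sketch with a hypothetical sample name: for
//   entry_point = INPUTS_DIR/"gear"/"main.kcl"
// the parent is INPUTS_DIR/"gear", relative_path is "gear", and snapshots are
// written to OUTPUTS_DIR/"gear", creating that directory if it does not exist.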
|
||||
@ -173,8 +183,9 @@ fn kcl_samples_inputs() -> Vec<Test> {
|
||||
eprintln!("Found KCL sample: {:?}", dir_name.to_string_lossy());
|
||||
// Look for the entry point inside the directory.
|
||||
let sub_dir = INPUTS_DIR.join(dir_name);
|
||||
let entry_point = if sub_dir.join("main.kcl").exists() {
|
||||
"main.kcl".to_owned()
|
||||
let main_kcl_path = sub_dir.join("main.kcl");
|
||||
let entry_point = if main_kcl_path.exists() {
|
||||
main_kcl_path
|
||||
} else {
|
||||
panic!("No main.kcl found in {:?}", sub_dir);
|
||||
};
|
||||
|
@ -102,7 +102,7 @@ impl TyF64 {
|
||||
t => unreachable!("expected length, found {t:?}"),
|
||||
};
|
||||
|
||||
assert_ne!(len, UnitLen::Unknown);
|
||||
debug_assert_ne!(len, UnitLen::Unknown);
|
||||
|
||||
len.adjust_to(self.n, units).0
|
||||
}
|
||||
@ -114,7 +114,7 @@ impl TyF64 {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
assert_ne!(angle, UnitAngle::Unknown);
|
||||
debug_assert_ne!(angle, UnitAngle::Unknown);
|
||||
|
||||
angle.adjust_to(self.n, UnitAngle::Degrees).0
|
||||
}
|
||||
@ -126,7 +126,7 @@ impl TyF64 {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
assert_ne!(angle, UnitAngle::Unknown);
|
||||
debug_assert_ne!(angle, UnitAngle::Unknown);
|
||||
|
||||
angle.adjust_to(self.n, UnitAngle::Radians).0
|
||||
}
|
||||
|
@ -161,6 +161,7 @@ async fn fix_tags_and_references(
|
||||
},
|
||||
exec_state,
|
||||
args,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
@ -293,20 +293,6 @@ pub async fn subtract(exec_state: &mut ExecState, args: Args) -> Result<KclValue
|
||||
let solids: Vec<Solid> = args.get_unlabeled_kw_arg_typed("solids", &RuntimeType::solids(), exec_state)?;
|
||||
let tools: Vec<Solid> = args.get_kw_arg_typed("tools", &RuntimeType::solids(), exec_state)?;
|
||||
|
||||
if solids.len() > 1 {
|
||||
return Err(KclError::UndefinedValue(KclErrorDetails {
|
||||
message: "Only one solid is allowed for a subtract operation, currently.".to_string(),
|
||||
source_ranges: vec![args.source_range],
|
||||
}));
|
||||
}
|
||||
|
||||
if tools.len() > 1 {
|
||||
return Err(KclError::UndefinedValue(KclErrorDetails {
|
||||
message: "Only one tool is allowed for a subtract operation, currently.".to_string(),
|
||||
source_ranges: vec![args.source_range],
|
||||
}));
|
||||
}
|
||||
|
||||
let tolerance: Option<TyF64> = args.get_kw_arg_opt_typed("tolerance", &RuntimeType::length(), exec_state)?;
|
||||
|
||||
let solids = inner_subtract(solids, tools, tolerance, exec_state, args).await?;
|
||||
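Worth spelling out (an inference from this changeset, not stated in it): with the two early-return guards above deleted, subtract accepts several solids and several tools in one call, which the multi_target_csg and subtract_with_pattern* test modules added further down exercise.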
|
@ -220,6 +220,7 @@ async fn inner_extrude(
|
||||
},
|
||||
exec_state,
|
||||
&args,
|
||||
None,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
@ -234,6 +235,7 @@ pub(crate) struct NamedCapTags<'a> {
|
||||
pub end: Option<&'a TagNode>,
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) async fn do_post_extrude<'a>(
|
||||
sketch: &Sketch,
|
||||
#[cfg(feature = "artifact-graph")] solid_id: ArtifactId,
|
||||
@ -242,6 +244,7 @@ pub(crate) async fn do_post_extrude<'a>(
|
||||
named_cap_tags: &'a NamedCapTags<'a>,
|
||||
exec_state: &mut ExecState,
|
||||
args: &Args,
|
||||
edge_id: Option<Uuid>,
|
||||
) -> Result<Solid, KclError> {
|
||||
// Bring the object to the front of the scene.
|
||||
// See: https://github.com/KittyCAD/modeling-app/issues/806
|
||||
@ -251,7 +254,9 @@ pub(crate) async fn do_post_extrude<'a>(
|
||||
)
|
||||
.await?;
|
||||
|
||||
let any_edge_id = if let Some(edge_id) = sketch.mirror {
|
||||
let any_edge_id = if let Some(id) = edge_id {
|
||||
id
|
||||
} else if let Some(edge_id) = sketch.mirror {
|
||||
edge_id
|
||||
} else {
|
||||
// The "get extrusion face info" API call requires *any* edge on the sketch being extruded.
|
||||
|
@ -187,6 +187,7 @@ async fn inner_loft(
|
||||
},
|
||||
exec_state,
|
||||
&args,
|
||||
None,
|
||||
)
|
||||
.await?,
|
||||
))
|
||||
|
@ -20,6 +20,8 @@ use crate::{
|
||||
std::{axis_or_reference::Axis2dOrEdgeReference, extrude::do_post_extrude, Args},
|
||||
};
|
||||
|
||||
extern crate nalgebra_glm as glm;
|
||||
|
||||
/// Revolve a sketch or set of sketches around an axis.
|
||||
pub async fn revolve(exec_state: &mut ExecState, args: Args) -> Result<KclValue, KclError> {
|
||||
let sketches = args.get_unlabeled_kw_arg_typed("sketches", &RuntimeType::sketches(), exec_state)?;
|
||||
@ -131,8 +133,9 @@ async fn inner_revolve(
|
||||
let mut solids = Vec::new();
|
||||
for sketch in &sketches {
|
||||
let id = exec_state.next_uuid();
|
||||
let tolerance = tolerance.as_ref().map(|t| t.to_mm()).unwrap_or(DEFAULT_TOLERANCE);
|
||||
|
||||
match &axis {
|
||||
let direction = match &axis {
|
||||
Axis2dOrEdgeReference::Axis { direction, origin } => {
|
||||
args.batch_modeling_cmd(
|
||||
id,
|
||||
@ -149,12 +152,13 @@ async fn inner_revolve(
|
||||
y: LengthUnit(origin[1].to_mm()),
|
||||
z: LengthUnit(0.0),
|
||||
},
|
||||
tolerance: LengthUnit(tolerance.as_ref().map(|t| t.to_mm()).unwrap_or(DEFAULT_TOLERANCE)),
|
||||
tolerance: LengthUnit(tolerance),
|
||||
axis_is_2d: true,
|
||||
opposite: opposite.clone(),
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
glm::DVec2::new(direction[0].to_mm(), direction[1].to_mm())
|
||||
}
|
||||
Axis2dOrEdgeReference::Edge(edge) => {
|
||||
let edge_id = edge.get_engine_id(exec_state, &args)?;
|
||||
@ -164,12 +168,29 @@ async fn inner_revolve(
|
||||
angle,
|
||||
target: sketch.id.into(),
|
||||
edge_id,
|
||||
tolerance: LengthUnit(tolerance.as_ref().map(|t| t.to_mm()).unwrap_or(DEFAULT_TOLERANCE)),
|
||||
tolerance: LengthUnit(tolerance),
|
||||
opposite: opposite.clone(),
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
// TODO: fix me! Need to be able to calculate this to ensure the path isn't collinear
|
||||
glm::DVec2::new(0.0, 1.0)
|
||||
}
|
||||
};
|
||||
|
||||
let mut edge_id = None;
|
||||
// If an edge lies on the axis of revolution it will not exist after the revolve, so
|
||||
// it cannot be used to retrieve data about the solid
|
||||
for path in sketch.paths.clone() {
|
||||
let from = path.get_from();
|
||||
let to = path.get_to();
|
||||
|
||||
let dir = glm::DVec2::new(to[0].n - from[0].n, to[1].n - from[1].n);
|
||||
if glm::are_collinear2d(&dir, &direction, tolerance) {
|
||||
continue;
|
||||
}
|
||||
edge_id = Some(path.get_id());
|
||||
break;
|
||||
}
|
||||
|
||||
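// Worked example with made-up numbers (not from the diff): for an axis
// direction of (0, 1), a segment from (0, 0) to (0, 5) has direction (0, 5),
// which is collinear with the axis and is skipped; a segment from (0, 0) to
// (3, 1) is not collinear, so its id becomes the edge_id handed to
// do_post_extrude below for the extrusion face info query.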
solids.push(
|
||||
@ -185,6 +206,7 @@ async fn inner_revolve(
|
||||
},
|
||||
exec_state,
|
||||
&args,
|
||||
edge_id,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
@ -110,9 +110,9 @@ pub async fn sweep(exec_state: &mut ExecState, args: Args) -> Result<KclValue, K
|
||||
///
|
||||
///
|
||||
/// // Create a spring by sweeping around the helix path.
|
||||
/// springSketch = startSketchOn(YZ)
|
||||
/// |> circle( center = [0, 0], radius = 1)
|
||||
/// |> sweep(path = helixPath, relativeTo = "sketchPlane")
|
||||
/// springSketch = startSketchOn(XZ)
|
||||
/// |> circle( center = [5, 0], radius = 1)
|
||||
/// |> sweep(path = helixPath)
|
||||
/// ```
|
||||
///
|
||||
/// ```no_run
|
||||
@ -228,6 +228,7 @@ async fn inner_sweep(
|
||||
},
|
||||
exec_state,
|
||||
&args,
|
||||
None,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
@ -82,9 +82,9 @@ export END = 'end'
|
||||
/// )
|
||||
///
|
||||
/// // Create a spring by sweeping around the helix path.
|
||||
/// springSketch = startSketchOn(YZ)
|
||||
/// |> circle( center = [0, 0], radius = 0.5)
|
||||
/// |> sweep(path = helixPath, relativeTo = sweep::SKETCH_PLANE)
|
||||
/// springSketch = startSketchOn(XZ)
|
||||
/// |> circle( center = [5, 0], radius = 0.5)
|
||||
/// |> sweep(path = helixPath)
|
||||
/// ```
|
||||
///
|
||||
/// ```
|
||||
@ -103,9 +103,9 @@ export END = 'end'
|
||||
/// )
|
||||
///
|
||||
/// // Create a spring by sweeping around the helix path.
|
||||
/// springSketch = startSketchOn(XY)
|
||||
/// |> circle( center = [0, 0], radius = 0.5 )
|
||||
/// |> sweep(path = helixPath, relativeTo = sweep::SKETCH_PLANE)
|
||||
/// springSketch = startSketchOn(XZ)
|
||||
/// |> circle( center = [5, 0], radius = 0.5 )
|
||||
/// |> sweep(path = helixPath)
|
||||
/// ```
|
||||
///
|
||||
/// ```
|
||||
@ -123,9 +123,9 @@ export END = 'end'
|
||||
/// )
|
||||
///
|
||||
/// // Create a spring by sweeping around the helix path.
|
||||
/// springSketch = startSketchOn(XY)
|
||||
/// |> circle( center = [0, 0], radius = 1 )
|
||||
/// |> sweep(path = helixPath, relativeTo = sweep::SKETCH_PLANE)
|
||||
/// springSketch = startSketchOn(XZ)
|
||||
/// |> circle( center = [5, 0], radius = 1 )
|
||||
/// |> sweep(path = helixPath)
|
||||
/// ```
|
||||
///
|
||||
/// ```
|
||||
@ -408,13 +408,13 @@ export fn offsetPlane(
|
||||
/// )
|
||||
///
|
||||
///
|
||||
/// springSketch = startSketchOn(YZ)
|
||||
/// springSketch = startSketchOn(XZ)
|
||||
/// |> circle( center = [0, 0], radius = 1)
|
||||
///
|
||||
/// // Create a spring by sweeping around the helix path.
|
||||
/// sweepedSpring = clone(springSketch)
|
||||
/// |> translate(x=100)
|
||||
/// |> sweep(path = helixPath, relativeTo = sweep::SKETCH_PLANE)
|
||||
/// |> translate(x=5)
|
||||
/// |> sweep(path = helixPath)
|
||||
/// ```
|
||||
///
|
||||
/// ```kcl
|
||||
|
@ -22,13 +22,21 @@ flowchart LR
|
||||
10["Sweep Extrusion<br>[279, 298, 0]"]
|
||||
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 8 }]
|
||||
11[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
12[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
13[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
14[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
15[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
16[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
17["Cap Start"]
|
||||
%% face_code_ref=Missing NodePath
|
||||
18["Cap End"]
|
||||
%% face_code_ref=Missing NodePath
|
||||
19["SweepEdge Opposite"]
|
||||
20["SweepEdge Opposite"]
|
||||
21["SweepEdge Opposite"]
|
||||
|
@ -31,21 +31,31 @@ flowchart LR
|
||||
1["Plane<br>[12, 29, 0]"]
|
||||
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
2["StartSketchOnFace<br>[343, 382, 0]"]
|
||||
%% Missing NodePath
|
||||
%% [ProgramBodyItem { index: 2 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
16["Sweep Extrusion<br>[258, 290, 0]"]
|
||||
%% [ProgramBodyItem { index: 1 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
17["Sweep Extrusion<br>[553, 583, 0]"]
|
||||
%% [ProgramBodyItem { index: 3 }, VariableDeclarationDeclaration, VariableDeclarationInit]
|
||||
18[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
19[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
20[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
21[Wall]
|
||||
%% face_code_ref=[ProgramBodyItem { index: 2 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
22[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
23[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
24[Wall]
|
||||
%% face_code_ref=Missing NodePath
|
||||
25["Cap Start"]
|
||||
%% face_code_ref=Missing NodePath
|
||||
26["Cap End"]
|
||||
%% face_code_ref=Missing NodePath
|
||||
27["Cap End"]
|
||||
%% face_code_ref=Missing NodePath
|
||||
28["SweepEdge Opposite"]
|
||||
29["SweepEdge Opposite"]
|
||||
30["SweepEdge Opposite"]
|
||||
|
@ -13,7 +13,7 @@ flowchart LR
|
||||
3["Plane<br>[110, 138, 0]"]
|
||||
%% [ProgramBodyItem { index: 2 }, VariableDeclarationDeclaration, VariableDeclarationInit]
|
||||
4["StartSketchOnPlane<br>[152, 181, 0]"]
|
||||
%% Missing NodePath
|
||||
%% [ProgramBodyItem { index: 3 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
1 <--x 4
|
||||
1 --- 5
|
||||
5 --- 6
|
||||
|
@ -55,11 +55,11 @@ flowchart LR
|
||||
1["Plane<br>[12, 29, 0]"]
|
||||
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
2["StartSketchOnFace<br>[255, 294, 0]"]
|
||||
%% Missing NodePath
|
||||
%% [ProgramBodyItem { index: 2 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
3["StartSketchOnFace<br>[511, 550, 0]"]
|
||||
%% Missing NodePath
|
||||
%% [ProgramBodyItem { index: 4 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
4["StartSketchOnFace<br>[780, 819, 0]"]
|
||||
%% Missing NodePath
|
||||
%% [ProgramBodyItem { index: 6 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
|
||||
29["Sweep Extrusion<br>[212, 242, 0]"]
|
||||
%% [ProgramBodyItem { index: 1 }, VariableDeclarationDeclaration, VariableDeclarationInit]
|
||||
30["Sweep Extrusion<br>[468, 498, 0]"]
|
||||
@@ -69,22 +69,39 @@ flowchart LR
32["Sweep Extrusion<br>[994, 1024, 0]"]
%% [ProgramBodyItem { index: 7 }, VariableDeclarationDeclaration, VariableDeclarationInit]
33[Wall]
%% face_code_ref=Missing NodePath
34[Wall]
%% face_code_ref=Missing NodePath
35[Wall]
%% face_code_ref=[ProgramBodyItem { index: 2 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
36[Wall]
%% face_code_ref=Missing NodePath
37[Wall]
%% face_code_ref=Missing NodePath
38[Wall]
%% face_code_ref=Missing NodePath
39[Wall]
%% face_code_ref=Missing NodePath
40[Wall]
%% face_code_ref=Missing NodePath
41[Wall]
%% face_code_ref=Missing NodePath
42[Wall]
%% face_code_ref=Missing NodePath
43[Wall]
%% face_code_ref=Missing NodePath
44[Wall]
%% face_code_ref=[ProgramBodyItem { index: 6 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
45["Cap Start"]
%% face_code_ref=Missing NodePath
46["Cap End"]
%% face_code_ref=[ProgramBodyItem { index: 4 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
47["Cap End"]
%% face_code_ref=Missing NodePath
48["Cap End"]
%% face_code_ref=Missing NodePath
49["Cap End"]
%% face_code_ref=Missing NodePath
50["SweepEdge Opposite"]
51["SweepEdge Opposite"]
52["SweepEdge Opposite"]
@@ -39,17 +39,29 @@ flowchart LR
18["Sweep Extrusion<br>[264, 286, 2]"]
%% Missing NodePath
19[Wall]
%% face_code_ref=Missing NodePath
20[Wall]
%% face_code_ref=Missing NodePath
21[Wall]
%% face_code_ref=Missing NodePath
22[Wall]
%% face_code_ref=Missing NodePath
23[Wall]
%% face_code_ref=Missing NodePath
24[Wall]
%% face_code_ref=Missing NodePath
25[Wall]
%% face_code_ref=Missing NodePath
26[Wall]
%% face_code_ref=Missing NodePath
27["Cap Start"]
%% face_code_ref=Missing NodePath
28["Cap Start"]
%% face_code_ref=Missing NodePath
29["Cap End"]
%% face_code_ref=Missing NodePath
30["Cap End"]
%% face_code_ref=Missing NodePath
31["SweepEdge Opposite"]
32["SweepEdge Opposite"]
33["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[195, 215, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[183, 203, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[210, 230, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[210, 230, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[183, 203, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -12,6 +12,7 @@ flowchart LR
5["Sweep Revolve<br>[76, 120, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 2 }]
6[Wall]
%% face_code_ref=Missing NodePath
7["SweepEdge Adjacent"]
1 --- 2
2 --- 3
@@ -12,8 +12,11 @@ flowchart LR
5["Sweep Extrusion<br>[102, 122, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 2 }]
6[Wall]
%% face_code_ref=Missing NodePath
7["Cap Start"]
%% face_code_ref=Missing NodePath
8["Cap End"]
%% face_code_ref=Missing NodePath
9["SweepEdge Opposite"]
10["SweepEdge Adjacent"]
1 --- 2
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[157, 176, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -130,7 +130,7 @@ flowchart LR
2["Plane<br>[1424, 1442, 0]"]
%% [ProgramBodyItem { index: 11 }, VariableDeclarationDeclaration, VariableDeclarationInit]
3["StartSketchOnFace<br>[309, 348, 0]"]
%% Missing NodePath
%% [ProgramBodyItem { index: 3 }, VariableDeclarationDeclaration, VariableDeclarationInit]
60["Sweep Extrusion<br>[264, 296, 0]"]
%% [ProgramBodyItem { index: 2 }, VariableDeclarationDeclaration, VariableDeclarationInit]
61["Sweep RevolveAboutEdge<br>[1300, 1366, 0]"]
@@ -142,26 +142,47 @@ flowchart LR
64["Sweep RevolveAboutEdge<br>[2443, 2488, 0]"]
%% [ProgramBodyItem { index: 18 }, VariableDeclarationDeclaration, VariableDeclarationInit]
65[Wall]
%% face_code_ref=Missing NodePath
66[Wall]
%% face_code_ref=Missing NodePath
67[Wall]
%% face_code_ref=Missing NodePath
68[Wall]
%% face_code_ref=Missing NodePath
69[Wall]
%% face_code_ref=Missing NodePath
70[Wall]
%% face_code_ref=[ProgramBodyItem { index: 3 }, VariableDeclarationDeclaration, VariableDeclarationInit]
71[Wall]
%% face_code_ref=Missing NodePath
72[Wall]
%% face_code_ref=Missing NodePath
73[Wall]
%% face_code_ref=Missing NodePath
74[Wall]
%% face_code_ref=Missing NodePath
75[Wall]
%% face_code_ref=Missing NodePath
76[Wall]
%% face_code_ref=Missing NodePath
77[Wall]
%% face_code_ref=Missing NodePath
78["Cap Start"]
%% face_code_ref=Missing NodePath
79["Cap Start"]
%% face_code_ref=Missing NodePath
80["Cap Start"]
%% face_code_ref=Missing NodePath
81["Cap Start"]
%% face_code_ref=Missing NodePath
82["Cap End"]
%% face_code_ref=Missing NodePath
83["Cap End"]
%% face_code_ref=Missing NodePath
84["Cap End"]
%% face_code_ref=Missing NodePath
85["Cap End"]
%% face_code_ref=Missing NodePath
86["SweepEdge Opposite"]
87["SweepEdge Opposite"]
88["SweepEdge Opposite"]
@@ -20,11 +20,17 @@ flowchart LR
9["Sweep Extrusion<br>[374, 402, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, FunctionExpressionBody, FunctionExpressionBodyItem { index: 7 }, ReturnStatementArg, PipeBodyItem { index: 7 }]
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13[Wall]
%% face_code_ref=Missing NodePath
14["Cap Start"]
%% face_code_ref=Missing NodePath
15["Cap End"]
%% face_code_ref=Missing NodePath
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
18["SweepEdge Opposite"]
@@ -20,11 +20,17 @@ flowchart LR
9["Sweep Extrusion<br>[366, 390, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, FunctionExpressionBody, FunctionExpressionBodyItem { index: 7 }, ReturnStatementArg, PipeBodyItem { index: 7 }]
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13[Wall]
%% face_code_ref=Missing NodePath
14["Cap Start"]
%% face_code_ref=Missing NodePath
15["Cap End"]
%% face_code_ref=Missing NodePath
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
18["SweepEdge Opposite"]
@@ -29,15 +29,21 @@ flowchart LR
1["Plane<br>[6, 23, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
2["StartSketchOnFace<br>[203, 241, 0]"]
%% Missing NodePath
%% [ProgramBodyItem { index: 1 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
15["Sweep Extrusion<br>[169, 189, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
16[Wall]
%% face_code_ref=Missing NodePath
17[Wall]
%% face_code_ref=Missing NodePath
18[Wall]
%% face_code_ref=Missing NodePath
19[Wall]
%% face_code_ref=[ProgramBodyItem { index: 1 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 0 }]
20["Cap Start"]
%% face_code_ref=Missing NodePath
21["Cap End"]
%% face_code_ref=Missing NodePath
22["SweepEdge Opposite"]
23["SweepEdge Opposite"]
24["SweepEdge Opposite"]
@@ -121,23 +121,41 @@ flowchart LR
51["Sweep Extrusion<br>[1482, 1506, 0]"]
%% [ProgramBodyItem { index: 18 }, VariableDeclarationDeclaration, VariableDeclarationInit, FunctionExpressionBody, FunctionExpressionBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 4 }]
52[Wall]
%% face_code_ref=Missing NodePath
53[Wall]
%% face_code_ref=Missing NodePath
54[Wall]
%% face_code_ref=Missing NodePath
55[Wall]
%% face_code_ref=Missing NodePath
56[Wall]
%% face_code_ref=Missing NodePath
57[Wall]
%% face_code_ref=Missing NodePath
58[Wall]
%% face_code_ref=Missing NodePath
59[Wall]
%% face_code_ref=Missing NodePath
60["Cap Start"]
%% face_code_ref=Missing NodePath
61["Cap Start"]
%% face_code_ref=Missing NodePath
62["Cap Start"]
%% face_code_ref=Missing NodePath
63["Cap Start"]
%% face_code_ref=Missing NodePath
64["Cap Start"]
%% face_code_ref=Missing NodePath
65["Cap End"]
%% face_code_ref=Missing NodePath
66["Cap End"]
%% face_code_ref=Missing NodePath
67["Cap End"]
%% face_code_ref=Missing NodePath
68["Cap End"]
%% face_code_ref=Missing NodePath
69["Cap End"]
%% face_code_ref=Missing NodePath
70["SweepEdge Opposite"]
71["SweepEdge Opposite"]
72["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[216, 236, 0]"]
%% [ProgramBodyItem { index: 0 }, ExpressionStatementExpr, PipeBodyItem { index: 5 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -19,8 +19,11 @@ flowchart LR
8["Sweep Extrusion<br>[702, 739, 0]"]
%% [ProgramBodyItem { index: 7 }, VariableDeclarationDeclaration, VariableDeclarationInit]
9[Wall]
%% face_code_ref=Missing NodePath
10["Cap Start"]
%% face_code_ref=Missing NodePath
11["Cap End"]
%% face_code_ref=Missing NodePath
12["SweepEdge Opposite"]
13["SweepEdge Adjacent"]
1 --- 2
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[181, 200, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, FunctionExpressionBody, FunctionExpressionBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -18,11 +18,17 @@ flowchart LR
8["Sweep Extrusion<br>[179, 198, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, FunctionExpressionBody, FunctionExpressionBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 6 }]
9[Wall]
%% face_code_ref=Missing NodePath
10[Wall]
%% face_code_ref=Missing NodePath
11[Wall]
%% face_code_ref=Missing NodePath
12[Wall]
%% face_code_ref=Missing NodePath
13["Cap Start"]
%% face_code_ref=Missing NodePath
14["Cap End"]
%% face_code_ref=Missing NodePath
15["SweepEdge Opposite"]
16["SweepEdge Opposite"]
17["SweepEdge Opposite"]
@@ -12,8 +12,11 @@ flowchart LR
5["Sweep Extrusion<br>[75, 95, 0]"]
%% [ProgramBodyItem { index: 0 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 2 }]
6[Wall]
%% face_code_ref=Missing NodePath
7["Cap Start"]
%% face_code_ref=Missing NodePath
8["Cap End"]
%% face_code_ref=Missing NodePath
9["SweepEdge Opposite"]
10["SweepEdge Adjacent"]
1 --- 2
@@ -85,31 +85,57 @@ flowchart LR
41["Sweep Extrusion<br>[2408, 2429, 0]"]
%% [ProgramBodyItem { index: 7 }, VariableDeclarationDeclaration, VariableDeclarationInit, PipeBodyItem { index: 2 }]
42[Wall]
%% face_code_ref=Missing NodePath
43[Wall]
%% face_code_ref=Missing NodePath
44[Wall]
%% face_code_ref=Missing NodePath
45[Wall]
%% face_code_ref=Missing NodePath
46[Wall]
%% face_code_ref=Missing NodePath
47[Wall]
%% face_code_ref=Missing NodePath
48[Wall]
%% face_code_ref=Missing NodePath
49[Wall]
%% face_code_ref=Missing NodePath
50[Wall]
%% face_code_ref=Missing NodePath
51[Wall]
%% face_code_ref=Missing NodePath
52[Wall]
%% face_code_ref=Missing NodePath
53[Wall]
%% face_code_ref=Missing NodePath
54[Wall]
%% face_code_ref=Missing NodePath
55[Wall]
%% face_code_ref=Missing NodePath
56[Wall]
%% face_code_ref=Missing NodePath
57[Wall]
%% face_code_ref=Missing NodePath
58[Wall]
%% face_code_ref=Missing NodePath
59[Wall]
%% face_code_ref=Missing NodePath
60[Wall]
%% face_code_ref=Missing NodePath
61[Wall]
%% face_code_ref=Missing NodePath
62[Wall]
%% face_code_ref=Missing NodePath
63[Wall]
%% face_code_ref=Missing NodePath
64[Wall]
%% face_code_ref=Missing NodePath
65[Wall]
%% face_code_ref=Missing NodePath
66["Cap Start"]
%% face_code_ref=Missing NodePath
67["Cap End"]
%% face_code_ref=Missing NodePath
68["SweepEdge Opposite"]
69["SweepEdge Opposite"]
70["SweepEdge Opposite"]