Compare commits

..

5 Commits

SHA1 Message Date
b4fb903bd0 Add recursive isEven() test 2024-07-22 20:03:49 -04:00
1b8688f274 Add lexical scope and redefining variables in functions (#3015)
* Fix to allow variable shadowing inside functions

* Implement closures

* Fix KCL test code to not reference future tag definition

* Remove tag declarator from function parameters

This is an example where the scoping change revealed a subtle issue
with TagDeclarators.  You cannot bind a new tag using a function
parameter.

The issue is that evaluating a TagDeclarator like $foo binds an
identifier to its corresponding TagIdentifier, but returns the
TagDeclarator.  If you have a TagDeclarator passed in as a parameter
to a function, you can never get its corresponding TagIdentifier.

This seems like a case where TagDeclarator evaluation needs to be
revisited, especially now that we have scoped tags.

* Fix to query return, functions, and tag declarator AST nodes correctly
2024-07-22 19:43:40 -04:00
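The TagDeclarator note above can be sketched with a toy interpreter fragment. This is only an illustration of the binding-versus-return-value mismatch under assumed, simplified names (`KclValue`, `Env`, `eval_tag_declarator`); it is not the actual KCL executor code in `src/wasm-lib`:

```rust
use std::collections::HashMap;

// Hypothetical, simplified value and environment types for illustration only.
#[derive(Debug, Clone)]
enum KclValue {
    TagDeclarator { name: String },
    TagIdentifier { name: String },
}

struct Env {
    bindings: HashMap<String, KclValue>,
}

// Evaluating `$foo` binds `foo` to a TagIdentifier in the current environment,
// but the value of the expression itself is the TagDeclarator.
fn eval_tag_declarator(name: &str, env: &mut Env) -> KclValue {
    env.bindings.insert(
        name.to_string(),
        KclValue::TagIdentifier { name: name.to_string() },
    );
    KclValue::TagDeclarator { name: name.to_string() }
}

fn main() {
    let mut caller_env = Env { bindings: HashMap::new() };
    let arg = eval_tag_declarator("foo", &mut caller_env);

    // A function that receives the TagDeclarator as a parameter only gets the
    // value; the TagIdentifier binding lives in the caller's scope, so with
    // lexically scoped environments the callee has no way to reach it.
    let callee_env = Env { bindings: HashMap::new() };
    if let KclValue::TagDeclarator { name } = &arg {
        assert!(callee_env.bindings.get(name).is_none());
        println!("callee cannot see the TagIdentifier for {name}");
    }
}
```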
397839da84 Fix syntax highlighting on code pane open/close (#3083) 2024-07-20 01:45:38 -07:00
ac120838e5 setIsLoading false earlier (#3072)
Signed-off-by: Jess Frazelle <github@jessfraz.com>
2024-07-19 22:18:31 -04:00
e6a2ac9c4a Typecheck KCL args via generics, not handwritten impls (#3025)
In KCL, arguments to functions are passed in the Args struct. This struct contains a list of args, but each arg could be any KCL type (they're stored in an enum of all possible types). To get args of the correct type, these enums are fallibly converted into the type expected for the matching parameter.

Until now, the fallible conversion was handwritten for nearly every function. This is unnecessary, so I've replaced it with composable traits.
2024-07-19 20:30:13 -05:00
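As a rough sketch of what this commit message describes, a single generic trait can drive the fallible conversions. This is a minimal illustration, not the crate's actual API; only `Args` is named in the commit message, while `KclValue` and `FromKclValue` are assumed, simplified names:

```rust
// Minimal illustration of trait-based fallible argument conversion.
#[derive(Debug, Clone)]
enum KclValue {
    Number(f64),
    Str(String),
}

// Stand-in for the Args struct that carries a function call's arguments.
struct Args {
    values: Vec<KclValue>,
}

// One small trait impl per target type replaces a handwritten conversion in
// each stdlib function.
trait FromKclValue: Sized {
    fn from_kcl(value: &KclValue) -> Result<Self, String>;
}

impl FromKclValue for f64 {
    fn from_kcl(value: &KclValue) -> Result<Self, String> {
        match value {
            KclValue::Number(n) => Ok(*n),
            other => Err(format!("expected a number, got {other:?}")),
        }
    }
}

impl FromKclValue for String {
    fn from_kcl(value: &KclValue) -> Result<Self, String> {
        match value {
            KclValue::Str(s) => Ok(s.clone()),
            other => Err(format!("expected a string, got {other:?}")),
        }
    }
}

impl Args {
    // Generic accessor: the expected parameter type drives the conversion.
    fn get<T: FromKclValue>(&self, index: usize) -> Result<T, String> {
        let value = self
            .values
            .get(index)
            .ok_or_else(|| format!("missing argument {index}"))?;
        T::from_kcl(value)
    }
}

fn main() -> Result<(), String> {
    let args = Args {
        values: vec![KclValue::Number(2.0), KclValue::Str("seg01".into())],
    };
    let length: f64 = args.get(0)?;
    let tag: String = args.get(1)?;
    println!("length = {length}, tag = {tag}");
    Ok(())
}
```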
46 changed files with 1532 additions and 2712 deletions

View File

@ -1,78 +0,0 @@
name: build-test-web
on:
pull_request:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
check-format:
runs-on: 'ubuntu-22.04'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- run: yarn fmt-check
check-types:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- uses: Swatinem/rust-cache@v2
with:
workspaces: './src/wasm-lib'
- run: yarn build:wasm
- run: yarn xstate:typegen
- run: yarn tsc
check-typos:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
- name: Install codespell
run: |
python -m pip install codespell
- name: Run codespell
run: codespell --config .codespellrc # Edit this file to tweak the typo list and other configuration.
build-test-web:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- uses: Swatinem/rust-cache@v2
with:
workspaces: './src/wasm-lib'
- run: yarn build:wasm
- run: yarn simpleserver:ci
- run: yarn test:nowatch

View File

@ -1,4 +1,4 @@
name: build-test-publish-apps
name: CI
on:
pull_request:
@ -21,8 +21,75 @@ concurrency:
cancel-in-progress: true
jobs:
check-format:
runs-on: 'ubuntu-latest'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- run: yarn fmt-check
check-types:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- uses: Swatinem/rust-cache@v2
with:
workspaces: './src/wasm-lib'
- run: yarn build:wasm
- run: yarn xstate:typegen
- run: yarn tsc
check-typos:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
- name: Install codespell
run: |
python -m pip install codespell
- name: Run codespell
run: codespell --config .codespellrc # Edit this file to tweak the typo list and other configuration.
build-test-web:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- uses: Swatinem/rust-cache@v2
with:
workspaces: './src/wasm-lib'
- run: yarn build:wasm
- run: yarn simpleserver:ci
- run: yarn test:nowatch
prepare-json-files:
runs-on: ubuntu-22.04 # separate job on Ubuntu for easy string manipulations (compared to Windows)
runs-on: ubuntu-latest # separate job on Ubuntu for easy string manipulations (compared to Windows)
outputs:
version: ${{ steps.export_version.outputs.version }}
steps:
@ -47,7 +114,7 @@ jobs:
- name: Set updater test version
if: ${{ env.CUT_RELEASE_PR == 'true' }}
run: |
echo "$(jq --arg url 'https://dl.zoo.dev/releases/modeling-app/updater-test/last_update.json' \
echo "$(jq --arg url 'https://dl.zoo.dev/releases/modeling-app/test/last_update.json' \
'.plugins.updater.endpoints[]=$url' src-tauri/tauri.release.conf.json --indent 2)" > src-tauri/tauri.release.conf.json
- uses: actions/upload-artifact@v3
@ -62,9 +129,18 @@ jobs:
run: echo "version=`cat package.json | jq -r '.version'`" >> "$GITHUB_OUTPUT"
build-test-app-macos:
build-test-apps:
needs: [prepare-json-files]
runs-on: macos-14
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-14, ubuntu-latest, windows-latest]
env:
# Specific Apple Universal target for macos
TAURI_ARGS_MACOS: ${{ matrix.os == 'macos-14' && '--target universal-apple-darwin' || '' }}
# Only build executable on linux (no appimage or deb)
TAURI_ARGS_UBUNTU: ${{ matrix.os == 'ubuntu-latest' && '--bundles' || '' }}
steps:
- uses: actions/checkout@v4
@ -79,6 +155,28 @@ jobs:
cp artifact/src-tauri/tauri.conf.json src-tauri/tauri.conf.json
cp artifact/src-tauri/tauri.release.conf.json src-tauri/tauri.release.conf.json
- name: Update WebView2 on Windows
if: matrix.os == 'windows-latest'
# Workaround needed to build the tauri windows app with matching edge version.
# From https://github.com/actions/runner-images/issues/9538
run: |
Invoke-WebRequest -Uri 'https://go.microsoft.com/fwlink/p/?LinkId=2124703' -OutFile 'setup.exe'
Start-Process -FilePath setup.exe -Verb RunAs -Wait
- name: Install ubuntu system dependencies
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y \
libgtk-3-dev \
libayatana-appindicator3-dev \
webkit2gtk-driver \
libsoup-3.0-dev \
libjavascriptcoregtk-4.1-dev \
libwebkit2gtk-4.1-dev \
at-spi2-core \
xvfb
- name: Sync node version and setup cache
uses: actions/setup-node@v4
with:
@ -99,25 +197,60 @@ jobs:
with:
workspaces: './src/wasm-lib'
- name: Run build:wasm
run: "yarn build:wasm${{ env.BUILD_RELEASE == 'true' && '-dev' || ''}}"
- name: Run build:wasm manually
shell: bash
env:
MODE: ${{ env.BUILD_RELEASE == 'true' && '--release' || '--debug' }}
run: |
mkdir src/wasm-lib/pkg; cd src/wasm-lib
echo "building with ${{ env.MODE }}"
npx wasm-pack build --target web --out-dir pkg ${{ env.MODE }}
cd ../../
cp src/wasm-lib/pkg/wasm_lib_bg.wasm public
- name: Run vite build
- name: Run vite build (build:both)
run: yarn vite build --mode ${{ env.BUILD_RELEASE == 'true' && 'production' || 'development' }}
- name: Fix format
run: yarn fmt
- name: Install x86 target for Universal builds
- name: Install x86 target for Universal builds (MacOS only)
if: matrix.os == 'macos-14'
run: |
rustup target add x86_64-apple-darwin
- name: Prepare certificate and variables (Windows only)
if: ${{ matrix.os == 'windows-latest' && env.BUILD_RELEASE == 'true' }}
run: |
echo "${{secrets.SM_CLIENT_CERT_FILE_B64 }}" | base64 --decode > /d/Certificate_pkcs12.p12
cat /d/Certificate_pkcs12.p12
echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
echo "SM_HOST=${{ secrets.SM_HOST }}" >> "$GITHUB_ENV"
echo "SM_API_KEY=${{ secrets.SM_API_KEY }}" >> "$GITHUB_ENV"
echo "SM_CLIENT_CERT_FILE=D:\\Certificate_pkcs12.p12" >> "$GITHUB_ENV"
echo "SM_CLIENT_CERT_PASSWORD=${{ secrets.SM_CLIENT_CERT_PASSWORD }}" >> "$GITHUB_ENV"
echo "C:\Program Files (x86)\Windows Kits\10\App Certification Kit" >> $GITHUB_PATH
echo "C:\Program Files (x86)\Microsoft SDKs\Windows\v10.0A\bin\NETFX 4.8 Tools" >> $GITHUB_PATH
echo "C:\Program Files\DigiCert\DigiCert One Signing Manager Tools" >> $GITHUB_PATH
shell: bash
- name: Setup certificate with SSM KSP (Windows only)
if: ${{ matrix.os == 'windows-latest' && env.BUILD_RELEASE == 'true' }}
run: |
curl -X GET https://one.digicert.com/signingmanager/api-ui/v1/releases/smtools-windows-x64.msi/download -H "x-api-key:%SM_API_KEY%" -o smtools-windows-x64.msi
msiexec /i smtools-windows-x64.msi /quiet /qn
smksp_registrar.exe list
smctl.exe keypair ls
C:\Windows\System32\certutil.exe -csp "DigiCert Signing Manager KSP" -key -user
smksp_cert_sync.exe
shell: cmd
- name: Build the app (debug)
if: ${{ env.BUILD_RELEASE == 'false' }}
run: "yarn tauri build --debug --target universal-apple-darwin"
run: "yarn tauri build --debug ${{ env.TAURI_ARGS_MACOS }} ${{ env.TAURI_ARGS_UBUNTU }}"
- name: Build for Mac TestFlight (nightly)
if: ${{ github.event_name == 'schedule' }}
if: ${{ github.event_name == 'schedule' && matrix.os == 'macos-14' }}
shell: bash
run: |
unset APPLE_SIGNING_IDENTITY
@ -181,7 +314,7 @@ jobs:
- name: 'Upload to Mac TestFlight (nightly)'
uses: apple-actions/upload-testflight-build@v1
if: ${{ github.event_name == 'schedule' }}
if: ${{ github.event_name == 'schedule' && matrix.os == 'macos-14' }}
with:
app-path: 'src-tauri/target/universal-apple-darwin/release/bundle/macos/Zoo Modeling App.pkg'
issuer-id: ${{ secrets.APPLE_STORE_ISSUER_ID }}
@ -191,7 +324,7 @@ jobs:
- name: Clean up after Mac TestFlight (nightly)
if: ${{ github.event_name == 'schedule' }}
if: ${{ github.event_name == 'schedule' && matrix.os == 'macos-14' }}
shell: bash
run: |
git status
@ -217,165 +350,35 @@ jobs:
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
run: "yarn tauri build --config src-tauri/tauri.release.conf.json --target universal-apple-darwin"
TAURI_CONF_ARGS: "--config ${{ matrix.os == 'windows-latest' && 'src-tauri\\tauri.release.conf.json' || 'src-tauri/tauri.release.conf.json' }}"
run: "yarn tauri build ${{ env.TAURI_CONF_ARGS }} ${{ env.TAURI_ARGS_MACOS }} ${{ env.TAURI_ARGS_UBUNTU }}"
- uses: actions/upload-artifact@v3
with:
path: "src-tauri/target/universal-apple-darwin/${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}/bundle/*/*"
- uses: actions/download-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
- name: Copy updated .json file for updater test
if: ${{ env.CUT_RELEASE_PR == 'true' }}
run: "cp artifact/src-tauri/tauri.release.conf.json src-tauri/tauri.release.conf.json"
- name: Build the app (release, updater test)
if: ${{ env.CUT_RELEASE_PR == 'true' }}
if: matrix.os != 'ubuntu-latest'
env:
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
run: "yarn tauri build -c src-tauri/tauri.release.conf.json -b dmg --target universal-apple-darwin"
- uses: actions/upload-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
PREFIX: ${{ matrix.os == 'macos-14' && 'src-tauri/target/universal-apple-darwin' || 'src-tauri/target' }}
MODE: ${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}
with:
path: "src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg"
name: updater-test
path: "${{ env.PREFIX }}/${{ env.MODE }}/bundle/*/*"
build-test-app-windows:
needs: [prepare-json-files]
runs-on: windows-2022
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v3
- name: Copy updated .json files
if: github.event_name == 'schedule'
- name: Run e2e tests (linux only)
if: ${{ matrix.os == 'ubuntu-latest' && github.event_name != 'release' && github.event_name != 'schedule' }}
run: |
ls -l artifact
cp artifact/package.json package.json
cp artifact/src-tauri/tauri.conf.json src-tauri/tauri.conf.json
cp artifact/src-tauri/tauri.release.conf.json src-tauri/tauri.release.conf.json
- name: Update WebView2 on Windows
# Workaround needed to build the tauri windows app with matching edge version.
# From https://github.com/actions/runner-images/issues/9538
run: |
Invoke-WebRequest -Uri 'https://go.microsoft.com/fwlink/p/?LinkId=2124703' -OutFile 'setup.exe'
Start-Process -FilePath setup.exe -Verb RunAs -Wait
- name: Sync node version and setup cache
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
cache: 'yarn' # Set this to npm, yarn or pnpm.
- run: yarn install
- name: Setup Rust
uses: dtolnay/rust-toolchain@stable
- name: Setup Rust cache
uses: swatinem/rust-cache@v2
with:
workspaces: './src-tauri -> target'
- uses: Swatinem/rust-cache@v2
with:
workspaces: './src/wasm-lib'
- name: Install aarch64 target
run: rustup target add aarch64-pc-windows-msvc
- name: Run build:wasm manually
shell: bash
cargo install tauri-driver --force
source .env.${{ env.BUILD_RELEASE == 'true' && 'production' || 'development' }}
export VITE_KC_API_BASE_URL
xvfb-run yarn test:e2e:tauri
env:
MODE: ${{ env.BUILD_RELEASE == 'true' && '--release' || '--debug' }}
run: |
mkdir src/wasm-lib/pkg; cd src/wasm-lib
echo "building with ${{ env.MODE }}"
npx wasm-pack build --target web --out-dir pkg ${{ env.MODE }}
cd ../../
cp src/wasm-lib/pkg/wasm_lib_bg.wasm public
E2E_APPLICATION: "./src-tauri/target/${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}/zoo-modeling-app"
KITTYCAD_API_TOKEN: ${{ env.BUILD_RELEASE == 'true' && secrets.KITTYCAD_API_TOKEN || secrets.KITTYCAD_API_TOKEN_DEV }}
- name: Run vite build
run: yarn vite build --mode ${{ env.BUILD_RELEASE == 'true' && 'production' || 'development' }}
- name: Fix format
run: yarn fmt
- name: Prepare certificate and variables (Windows only)
if: ${{ env.BUILD_RELEASE == 'true' }}
run: |
echo "${{secrets.SM_CLIENT_CERT_FILE_B64 }}" | base64 --decode > /d/Certificate_pkcs12.p12
cat /d/Certificate_pkcs12.p12
echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
echo "SM_HOST=${{ secrets.SM_HOST }}" >> "$GITHUB_ENV"
echo "SM_API_KEY=${{ secrets.SM_API_KEY }}" >> "$GITHUB_ENV"
echo "SM_CLIENT_CERT_FILE=D:\\Certificate_pkcs12.p12" >> "$GITHUB_ENV"
echo "SM_CLIENT_CERT_PASSWORD=${{ secrets.SM_CLIENT_CERT_PASSWORD }}" >> "$GITHUB_ENV"
echo "C:\Program Files (x86)\Windows Kits\10\App Certification Kit" >> $GITHUB_PATH
echo "C:\Program Files (x86)\Microsoft SDKs\Windows\v10.0A\bin\NETFX 4.8 Tools" >> $GITHUB_PATH
echo "C:\Program Files\DigiCert\DigiCert One Signing Manager Tools" >> $GITHUB_PATH
shell: bash
- name: Setup certificate with SSM KSP (Windows only)
if: ${{ env.BUILD_RELEASE == 'true' }}
run: |
curl -X GET https://one.digicert.com/signingmanager/api-ui/v1/releases/smtools-windows-x64.msi/download -H "x-api-key:%SM_API_KEY%" -o smtools-windows-x64.msi
msiexec /i smtools-windows-x64.msi /quiet /qn
smksp_registrar.exe list
smctl.exe keypair ls
C:\Windows\System32\certutil.exe -csp "DigiCert Signing Manager KSP" -key -user
smksp_cert_sync.exe
shell: cmd
- name: Build the app (debug) for x86_64
if: ${{ env.BUILD_RELEASE == 'false' }}
run: "yarn run tauri build --debug"
- name: Build the app (release) and sign for x86_64
if: ${{ env.BUILD_RELEASE == 'true' }}
env:
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
run: "yarn tauri build --config src-tauri\\tauri.release.conf.json"
- uses: actions/upload-artifact@v3
with:
path: "src-tauri/target/${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}/bundle/*/*"
- name: Build the app (debug) for aarch64
if: ${{ env.BUILD_RELEASE == 'false' }}
run: yarn run tauri build --debug --target aarch64-pc-windows-msvc
- name: Build the app (release) and sign for aarch64
if: ${{ env.BUILD_RELEASE == 'true' }}
env:
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
run: "yarn tauri build --config src-tauri\\tauri.release.conf.json --target aarch64-pc-windows-msvc"
- uses: actions/upload-artifact@v3
with:
path: "src-tauri/target/aarch64-pc-windows-msvc/${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}/bundle/*/*"
- name: Run e2e tests
if: ${{ github.event_name != 'release' && github.event_name != 'schedule' }}
- name: Run e2e tests (windows only)
if: ${{ matrix.os == 'windows-latest' && github.event_name != 'release' && github.event_name != 'schedule' }}
run: |
cargo install tauri-driver --force
yarn wdio run wdio.conf.ts
env:
E2E_APPLICATION: ".\\src-tauri\\target\\${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}\\zoo-modeling-app.exe"
E2E_APPLICATION: ".\\src-tauri\\target\\${{ env.BUILD_RELEASE == 'true' && 'release' || 'debug' }}\\Zoo Modeling App.exe"
KITTYCAD_API_TOKEN: ${{ env.BUILD_RELEASE == 'true' && secrets.KITTYCAD_API_TOKEN || secrets.KITTYCAD_API_TOKEN_DEV }}
VITE_KC_API_BASE_URL: ${{ env.BUILD_RELEASE == 'true' && 'https://api.zoo.dev' || 'https://api.dev.zoo.dev' }}
E2E_TAURI_ENABLED: true
@ -386,41 +389,31 @@ jobs:
- name: Copy updated .json file for updater test
if: ${{ env.CUT_RELEASE_PR == 'true' }}
run: "cp artifact/src-tauri/tauri.release.conf.json src-tauri/tauri.release.conf.json"
run: |
ls -l artifact
cp artifact/src-tauri/tauri.release.conf.json src-tauri/tauri.release.conf.json
cat src-tauri/tauri.release.conf.json
- name: Build the app (release, updater test) for x86_64
if: ${{ env.CUT_RELEASE_PR == 'true' }}
- name: Build the app (release, updater test)
if: ${{ env.CUT_RELEASE_PR == 'true' && matrix.os != 'ubuntu-latest' }}
env:
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
run: "yarn tauri build -c src-tauri\\tauri.release.conf.json -b msi"
- name: Build the app (release, updater test) for aarch64
if: ${{ env.CUT_RELEASE_PR == 'true' }}
env:
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
run: "yarn tauri build -c src-tauri\\tauri.release.conf.json -b msi -t aarch64-pc-windows-msvc"
TAURI_CONF_ARGS: "-c ${{ matrix.os == 'windows-latest' && 'src-tauri\\tauri.release.conf.json' || 'src-tauri/tauri.release.conf.json' }}"
TAURI_BUNDLE_ARGS: "-b ${{ matrix.os == 'windows-latest' && 'msi' || 'dmg' }}"
run: "yarn tauri build ${{ env.TAURI_CONF_ARGS }} ${{ env.TAURI_BUNDLE_ARGS }} ${{ env.TAURI_ARGS_MACOS }}"
- uses: actions/upload-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
if: ${{ env.CUT_RELEASE_PR == 'true' && matrix.os != 'ubuntu-latest' }}
with:
path: "src-tauri/target/release/bundle/msi/*.msi"
name: updater-test
- uses: actions/upload-artifact@v3
if: ${{ env.CUT_RELEASE_PR == 'true' }}
with:
path: "src-tauri/target/aarch64-pc-windows-msvc/release/bundle/msi/*.msi"
path: "${{ matrix.os == 'macos-14' && 'src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg' || 'src-tauri/target/release/bundle/msi/*.msi' }}"
name: updater-test
publish-apps-release:
runs-on: ubuntu-22.04
permissions:
contents: write
runs-on: ubuntu-latest
if: ${{ github.event_name == 'release' || github.event_name == 'schedule' }}
needs: [prepare-json-files, build-test-app-macos, build-test-app-windows]
needs: [check-format, check-types, check-typos, build-test-web, prepare-json-files, build-test-apps]
env:
VERSION_NO_V: ${{ needs.prepare-json-files.outputs.version }}
VERSION: ${{ github.event_name == 'release' && format('v{0}', needs.prepare-json-files.outputs.version) || needs.prepare-json-files.outputs.version }}
@ -436,8 +429,7 @@ jobs:
run: |
ls -l artifact/*/*oo*
DARWIN_SIG=`cat artifact/macos/*.app.tar.gz.sig`
WINDOWS_X86_64_SIG=`cat artifact/msi/*x64*.msi.zip.sig`
WINDOWS_AARCH64_SIG=`cat artifact/msi/*arm64*.msi.zip.sig`
WINDOWS_SIG=`cat artifact/msi/*.msi.zip.sig`
RELEASE_DIR=https://${WEBSITE_DIR}/${VERSION}
jq --null-input \
--arg version "${VERSION}" \
@ -445,10 +437,8 @@ jobs:
--arg notes "${NOTES}" \
--arg darwin_sig "$DARWIN_SIG" \
--arg darwin_url "$RELEASE_DIR/macos/${{ env.URL_CODED_NAME }}.app.tar.gz" \
--arg windows_x86_64_sig "$WINDOWS_X86_64_SIG" \
--arg windows_x86_64_url "$RELEASE_DIR/msi/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_x64_en-US.msi.zip" \
--arg windows_aarch64_sig "$WINDOWS_AARCH64_SIG" \
--arg windows_aarch64_url "$RELEASE_DIR/msi/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_arm64_en-US.msi.zip" \
--arg windows_sig "$WINDOWS_SIG" \
--arg windows_url "$RELEASE_DIR/msi/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_x64_en-US.msi.zip" \
'{
"version": $version,
"pub_date": $pub_date,
@ -463,12 +453,8 @@ jobs:
"url": $darwin_url
},
"windows-x86_64": {
"signature": $windows_x86_64_sig,
"url": $windows_x86_64_url
},
"windows-aarch64": {
"signature": $windows_aarch64_sig,
"url": $windows_aarch64_url
"signature": $windows_sig,
"url": $windows_url
}
}
}' > last_update.json
@ -482,8 +468,7 @@ jobs:
--arg pub_date "${PUB_DATE}" \
--arg notes "${NOTES}" \
--arg darwin_url "$RELEASE_DIR/dmg/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_universal.dmg" \
--arg windows_x86_64_url "$RELEASE_DIR/msi/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_x64_en-US.msi" \
--arg windows_aarch64_url "$RELEASE_DIR/msi/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_arm64_en-US.msi" \
--arg windows_url "$RELEASE_DIR/msi/${{ env.URL_CODED_NAME }}_${VERSION_NO_V}_x64_en-US.msi" \
'{
"version": $version,
"pub_date": $pub_date,
@ -493,10 +478,7 @@ jobs:
"url": $darwin_url
},
"msi-x86_64": {
"url": $windows_x86_64_url
},
"msi-aarch64": {
"url": $windows_aarch64_url
"url": $windows_url
}
}
}' > last_download.json
@ -540,7 +522,7 @@ jobs:
announce_release:
needs: [publish-apps-release]
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
if: github.event_name == 'release'
steps:
- name: Check out code

View File

@ -7,7 +7,7 @@ on:
jobs:
create-release:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: read

View File

@ -345,7 +345,7 @@ $env:KITTYCAD_API_TOKEN="<YOUR_KITTYCAD_API_TOKEN>"
$env:VITE_KC_API_BASE_URL="https://api.dev.zoo.dev"
$env:E2E_TAURI_ENABLED="true"
$env:TS_NODE_COMPILER_OPTIONS='{"module": "commonjs"}'
$env:E2E_APPLICATION=".\src-tauri\target\debug\zoo-modeling-app.exe"
$env:E2E_APPLICATION=".\src-tauri\target\debug\Zoo Modeling App.exe"
Stop-Process -Name msedgedriver
yarn wdio run wdio.conf.ts
```

7 binary image files changed (not shown). Sizes before → after: 45 KiB → 45 KiB, 52 KiB → 51 KiB, 36 KiB → 36 KiB, 37 KiB → 37 KiB, 40 KiB → 40 KiB, 45 KiB → 45 KiB, 32 KiB → 32 KiB.

View File

@ -1,5 +1,5 @@
{
"name": "zoo-modeling-app",
"name": "untitled-app",
"version": "0.24.3",
"private": true,
"dependencies": {
@ -113,7 +113,7 @@
"@iarna/toml": "^2.2.5",
"@lezer/generator": "^1.7.1",
"@playwright/test": "^1.45.1",
"@tauri-apps/cli": "==2.0.0-beta.22",
"@tauri-apps/cli": "==2.0.0-beta.13",
"@testing-library/jest-dom": "^5.14.1",
"@testing-library/react": "^15.0.2",
"@types/mocha": "^10.0.6",

src-tauri/Cargo.lock (generated, 1411 changed lines)

File diff suppressed because it is too large

View File

@ -1,18 +1,17 @@
[package]
name = "zoo-modeling-app"
name = "app"
version = "0.1.0"
description = "The Zoo Modeling App"
authors = ["Zoo Engineers <eng@zoo.dev>"]
license = ""
repository = "https://github.com/KittyCAD/modeling-app"
default-run = "zoo-modeling-app"
default-run = "app"
edition = "2021"
rust-version = "1.70"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[build-dependencies]
tauri-build = { version = "2.0.0-beta.18", features = [] }
tauri-bundler = { git = "https://github.com/tauri-apps/tauri", rev = "1a88fc1a9b81bd09fc24cf0dfed95d20dd72a1bb" }
[dependencies]
anyhow = "1"

View File

@ -1,7 +1,6 @@
{
"$schema": "../node_modules/@tauri-apps/cli/schema.json",
"bundle": {
"createUpdaterArtifacts": "v1Compatible",
"windows": {
"certificateThumbprint": "F4C9A52FF7BC26EE5E054946F6B11DEEA94C748D",
"digestAlgorithm": "sha256",

View File

@ -47,7 +47,6 @@ import {
PipeExpression,
Program,
ProgramMemory,
programMemoryInit,
recast,
SketchGroup,
ExtrudeGroup,
@ -130,7 +129,7 @@ export const HIDE_HOVER_SEGMENT_LENGTH = 60 // in pixels
export class SceneEntities {
engineCommandManager: EngineCommandManager
scene: Scene
sceneProgramMemory: ProgramMemory = { root: {}, return: null }
sceneProgramMemory: ProgramMemory = ProgramMemory.empty()
activeSegments: { [key: string]: Group } = {}
intersectionPlane: Mesh | null = null
axisGroup: Group | null = null
@ -550,9 +549,9 @@ export class SceneEntities {
const variableDeclarationName =
_node1.node?.declarations?.[0]?.id?.name || ''
const sg = kclManager.programMemory.root[
const sg = kclManager.programMemory.get(
variableDeclarationName
] as SketchGroup
) as SketchGroup
const lastSeg = sg.value.slice(-1)[0] || sg.start
const index = sg.value.length // because we've added a new segment that's not in the memory yet, no need for `-1`
@ -768,9 +767,9 @@ export class SceneEntities {
programMemoryOverride,
})
this.sceneProgramMemory = programMemory
const sketchGroup = programMemory.root[
const sketchGroup = programMemory.get(
variableDeclarationName
] as SketchGroup
) as SketchGroup
const sgPaths = sketchGroup.value
const orthoFactor = orthoScale(sceneInfra.camControls.camera)
@ -820,9 +819,9 @@ export class SceneEntities {
// Prepare to update the THREEjs scene
this.sceneProgramMemory = programMemory
const sketchGroup = programMemory.root[
const sketchGroup = programMemory.get(
variableDeclarationName
] as SketchGroup
) as SketchGroup
const sgPaths = sketchGroup.value
const orthoFactor = orthoScale(sceneInfra.camControls.camera)
@ -1081,9 +1080,9 @@ export class SceneEntities {
})
this.sceneProgramMemory = programMemory
const maybeSketchGroup = programMemory.root[variableDeclarationName]
const maybeSketchGroup = programMemory.get(variableDeclarationName)
let sketchGroup = undefined
if (maybeSketchGroup.type === 'SketchGroup') {
if (maybeSketchGroup?.type === 'SketchGroup') {
sketchGroup = maybeSketchGroup
} else if ((maybeSketchGroup as ExtrudeGroup).sketchGroup) {
sketchGroup = (maybeSketchGroup as ExtrudeGroup).sketchGroup
@ -1773,7 +1772,7 @@ function prepareTruncatedMemoryAndAst(
if (err(_node)) return _node
const variableDeclarationName = _node.node?.declarations?.[0]?.id?.name || ''
const lastSeg = (
programMemory.root[variableDeclarationName] as SketchGroup
programMemory.get(variableDeclarationName) as SketchGroup
).value.slice(-1)[0]
if (draftSegment) {
// truncatedAst needs to setup with another segment at the end
@ -1824,33 +1823,27 @@ function prepareTruncatedMemoryAndAst(
..._ast,
body: [JSON.parse(JSON.stringify(_ast.body[bodyIndex]))],
}
const programMemoryOverride = programMemoryInit()
if (err(programMemoryOverride)) return programMemoryOverride
// Grab all the TagDeclarators and TagIdentifiers from memory.
let start = _node.node.start
for (const key in programMemory.root) {
const value = programMemory.root[key]
if (!('__meta' in value)) {
continue
}
const programMemoryOverride = programMemory.filterVariables(true, (value) => {
if (
!('__meta' in value) ||
value.__meta === undefined ||
value.__meta.length === 0 ||
value.__meta[0].sourceRange === undefined
) {
continue
return false
}
if (value.__meta[0].sourceRange[0] >= start) {
// We only want things before our start point.
continue
return false
}
if (value.type === 'TagIdentifier') {
programMemoryOverride.root[key] = JSON.parse(JSON.stringify(value))
}
}
return value.type === 'TagIdentifier'
})
if (err(programMemoryOverride)) return programMemoryOverride
for (let i = 0; i < bodyIndex; i++) {
const node = _ast.body[i]
@ -1858,12 +1851,15 @@ function prepareTruncatedMemoryAndAst(
continue
}
const name = node.declarations[0].id.name
// const memoryItem = kclManager.programMemory.root[name]
const memoryItem = programMemory.root[name]
const memoryItem = programMemory.get(name)
if (!memoryItem) {
continue
}
programMemoryOverride.root[name] = JSON.parse(JSON.stringify(memoryItem))
const error = programMemoryOverride.set(
name,
JSON.parse(JSON.stringify(memoryItem))
)
if (err(error)) return error
}
return {
truncatedAst,
@ -1900,7 +1896,7 @@ export function sketchGroupFromPathToNode({
)
if (err(_varDec)) return _varDec
const varDec = _varDec.node
const result = programMemory.root[varDec?.id?.name || '']
const result = programMemory.get(varDec?.id?.name || '')
if (result?.type === 'ExtrudeGroup') {
return result.sketchGroup
}

View File

@ -1,5 +1,5 @@
import { useEffect, useState, useRef } from 'react'
import { parse, BinaryPart, Value } from '../lang/wasm'
import { parse, BinaryPart, Value, ProgramMemory } from '../lang/wasm'
import {
createIdentifier,
createLiteral,
@ -120,8 +120,7 @@ export function useCalc({
}, [])
useEffect(() => {
const allVarNames = Object.keys(programMemory.root)
if (allVarNames.includes(newVariableName)) {
if (programMemory.has(newVariableName)) {
setIsNewVariableNameUnique(false)
} else {
setIsNewVariableNameUnique(true)
@ -143,17 +142,20 @@ export function useCalc({
const code = `const __result__ = ${value}`
const ast = parse(code)
if (trap(ast)) return
const _programMem: any = { root: {}, return: null }
availableVarInfo.variables.forEach(({ key, value }) => {
_programMem.root[key] = { type: 'userVal', value, __meta: [] }
const _programMem: ProgramMemory = ProgramMemory.empty()
for (const { key, value } of availableVarInfo.variables) {
const error = _programMem.set(key, {
type: 'UserVal',
value,
__meta: [],
})
if (trap(error)) return
}
executeAst({
ast,
engineCommandManager,
useFakeExecutor: true,
programMemoryOverride: JSON.parse(
JSON.stringify(kclManager.programMemory)
),
programMemoryOverride: kclManager.programMemory.clone(),
}).then(({ programMemory }) => {
const resultDeclaration = ast.body.find(
(a) =>
@ -163,7 +165,7 @@ export function useCalc({
const init =
resultDeclaration?.type === 'VariableDeclaration' &&
resultDeclaration?.declarations?.[0]?.init
const result = programMemory?.root?.__result__?.value
const result = programMemory?.get('__result__')?.value
setCalcResult(typeof result === 'number' ? String(result) : 'NAN')
init && setValueNode(init)
})

View File

@ -163,7 +163,7 @@ export function useCodeMirror(props: UseCodeMirror) {
effects: StateEffect.reconfigure.of(targetExtensions),
})
}
}, [targetExtensions])
}, [targetExtensions, view, isFirstRender])
return { view, setView, container, setContainer, state, setState }
}

View File

@ -1,6 +1,6 @@
import { processMemory } from './MemoryPane'
import { enginelessExecutor } from '../../../lib/testHelpers'
import { initPromise, parse } from '../../../lang/wasm'
import { initPromise, parse, ProgramMemory } from '../../../lang/wasm'
beforeAll(async () => {
await initPromise
@ -29,10 +29,7 @@ describe('processMemory', () => {
|> lineTo([2.15, 4.32], %)
// |> rx(90, %)`
const ast = parse(code)
const programMemory = await enginelessExecutor(ast, {
root: {},
return: null,
})
const programMemory = await enginelessExecutor(ast, ProgramMemory.empty())
const output = processMemory(programMemory)
expect(output.myVar).toEqual(5)
expect(output.otherVar).toEqual(3)

View File

@ -82,8 +82,7 @@ export const MemoryPane = () => {
export const processMemory = (programMemory: ProgramMemory) => {
const processedMemory: any = {}
Object.keys(programMemory?.root || {}).forEach((key) => {
const val = programMemory.root[key]
for (const [key, val] of programMemory?.visibleEntries()) {
if (typeof val.value !== 'function') {
if (val.type === 'SketchGroup') {
processedMemory[key] = val.value.map(({ __geoMeta, ...rest }: Path) => {
@ -103,6 +102,6 @@ export const processMemory = (programMemory: ProgramMemory) => {
} else if (key !== 'log') {
processedMemory[key] = '__function__'
}
})
}
return processedMemory
}

View File

@ -179,6 +179,8 @@ export const Stream = () => {
videoElement: videoRef.current,
},
})
setIsLoading(false)
}, [mediaStream])
const handleMouseDown: MouseEventHandler<HTMLDivElement> = (e) => {

View File

@ -14,9 +14,7 @@ import {
Program,
ProgramMemory,
recast,
SketchGroup,
SourceRange,
ExtrudeGroup,
} from 'lang/wasm'
import { getNodeFromPath } from './queryAst'
import { codeManager, editorManager, sceneInfra } from 'lib/singletons'
@ -33,10 +31,7 @@ export class KclManager {
},
digest: null,
}
private _programMemory: ProgramMemory = {
root: {},
return: null,
}
private _programMemory: ProgramMemory = ProgramMemory.empty()
private _logs: string[] = []
private _kclErrors: KCLError[] = []
private _isExecuting = false
@ -505,10 +500,7 @@ function defaultSelectionFilter(
programMemory: ProgramMemory,
engineCommandManager: EngineCommandManager
) {
const firstSketchOrExtrudeGroup = Object.values(programMemory.root).find(
(node) => node.type === 'ExtrudeGroup' || node.type === 'SketchGroup'
) as SketchGroup | ExtrudeGroup
firstSketchOrExtrudeGroup &&
programMemory.hasSketchOrExtrudeGroup() &&
engineCommandManager.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),

View File

@ -16,7 +16,7 @@ const mySketch001 = startSketchOn('XY')
// |> rx(45, %)`
const programMemory = await enginelessExecutor(parse(code))
// @ts-ignore
const sketch001 = programMemory?.root?.mySketch001
const sketch001 = programMemory?.get('mySketch001')
expect(sketch001).toEqual({
type: 'SketchGroup',
on: expect.any(Object),
@ -66,7 +66,7 @@ const mySketch001 = startSketchOn('XY')
|> extrude(2, %)`
const programMemory = await enginelessExecutor(parse(code))
// @ts-ignore
const sketch001 = programMemory?.root?.mySketch001
const sketch001 = programMemory?.get('mySketch001')
expect(sketch001).toEqual({
type: 'ExtrudeGroup',
id: expect.any(String),
@ -146,7 +146,7 @@ const sk2 = startSketchOn('XY')
`
const programMemory = await enginelessExecutor(parse(code))
// @ts-ignore
const geos = [programMemory?.root?.theExtrude, programMemory?.root?.sk2]
const geos = [programMemory?.get('theExtrude'), programMemory?.get('sk2')]
expect(geos).toEqual([
{
type: 'ExtrudeGroup',

View File

@ -12,25 +12,25 @@ describe('test executor', () => {
it('test assigning two variables, the second summing with the first', async () => {
const code = `const myVar = 5
const newVar = myVar + 1`
const { root } = await exe(code)
expect(root.myVar.value).toBe(5)
expect(root.newVar.value).toBe(6)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(5)
expect(mem.get('newVar')?.value).toBe(6)
})
it('test assigning a var with a string', async () => {
const code = `const myVar = "a str"`
const { root } = await exe(code)
expect(root.myVar.value).toBe('a str')
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe('a str')
})
it('test assigning a var by cont concatenating two strings string execute', async () => {
const code = fs.readFileSync(
'./src/lang/testExamples/variableDeclaration.cado',
'utf-8'
)
const { root } = await exe(code)
expect(root.myVar.value).toBe('a str another str')
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe('a str another str')
})
it('fn funcN = () => {} execute', async () => {
const { root } = await exe(
const mem = await exe(
[
'fn funcN = (a, b) => {',
' return a + b',
@ -39,8 +39,8 @@ const newVar = myVar + 1`
'const magicNum = funcN(9, theVar)',
].join('\n')
)
expect(root.theVar.value).toBe(60)
expect(root.magicNum.value).toBe(69)
expect(mem.get('theVar')?.value).toBe(60)
expect(mem.get('magicNum')?.value).toBe(69)
})
it('sketch declaration', async () => {
let code = `const mySketch = startSketchOn('XY')
@ -50,9 +50,9 @@ const newVar = myVar + 1`
|> lineTo([5,-1], %, "rightPath")
// |> close(%)
`
const { root } = await exe(code)
const mem = await exe(code)
// geo is three js buffer geometry and is very bloated to have in tests
const minusGeo = root.mySketch.value
const minusGeo = mem.get('mySketch')?.value
expect(minusGeo).toEqual([
{
type: 'ToPoint',
@ -104,8 +104,8 @@ const newVar = myVar + 1`
'fn myFn = (a) => { return a + 1 }',
'const myVar = 5 + 1 |> myFn(%)',
].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(7)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(7)
})
// Enable rotations #152
@ -117,16 +117,16 @@ const newVar = myVar + 1`
// ' |> lineTo([1, 1], %)',
// 'const rotated = rx(90, mySk1)',
// ].join('\n')
// const { root } = await exe(code)
// expect(root.mySk1.value).toHaveLength(3)
// expect(root?.rotated?.type).toBe('SketchGroup')
// const mem = await exe(code)
// expect(mem.get('mySk1')?.value).toHaveLength(3)
// expect(mem.get('rotated')?.type).toBe('SketchGroup')
// if (
// root?.mySk1?.type !== 'SketchGroup' ||
// root?.rotated?.type !== 'SketchGroup'
// mem.get('mySk1')?.type !== 'SketchGroup' ||
// mem.get('rotated')?.type !== 'SketchGroup'
// )
// throw new Error('not a sketch group')
// expect(root.mySk1.rotation).toEqual([0, 0, 0, 1])
// expect(root.rotated.rotation.map((a) => a.toFixed(4))).toEqual([
// expect(mem.get('mySk1')?.rotation).toEqual([0, 0, 0, 1])
// expect(mem.get('rotated')?.rotation.map((a) => a.toFixed(4))).toEqual([
// '0.7071',
// '0.0000',
// '0.0000',
@ -144,8 +144,8 @@ const newVar = myVar + 1`
' |> lineTo([1,1], %)',
// ' |> rx(90, %)',
].join('\n')
const { root } = await exe(code)
expect(root.mySk1).toEqual({
const mem = await exe(code)
expect(mem.get('mySk1')).toEqual({
type: 'SketchGroup',
on: expect.any(Object),
start: {
@ -214,10 +214,9 @@ const newVar = myVar + 1`
const code = ['const three = 3', "const yo = [1, '2', three, 4 + 5]"].join(
'\n'
)
const { root } = await exe(code)
const mem = await exe(code)
// TODO path to node is probably wrong here, zero indexes are not correct
expect(root).toEqual({
three: {
expect(mem.get('three')).toEqual({
type: 'UserVal',
value: 3,
__meta: [
@ -225,8 +224,8 @@ const newVar = myVar + 1`
sourceRange: [14, 15],
},
],
},
yo: {
})
expect(mem.get('yo')).toEqual({
type: 'UserVal',
value: [1, '2', 3, 9],
__meta: [
@ -234,16 +233,18 @@ const newVar = myVar + 1`
sourceRange: [27, 49],
},
],
},
})
// Check that there are no other variables or environments.
expect(mem.numEnvironments()).toBe(1)
expect(mem.numVariables(0)).toBe(2)
})
it('execute object expression', async () => {
const code = [
'const three = 3',
"const yo = {aStr: 'str', anum: 2, identifier: three, binExp: 4 + 5}",
].join('\n')
const { root } = await exe(code)
expect(root.yo).toEqual({
const mem = await exe(code)
expect(mem.get('yo')).toEqual({
type: 'UserVal',
value: { aStr: 'str', anum: 2, identifier: 3, binExp: 9 },
__meta: [
@ -257,8 +258,8 @@ const newVar = myVar + 1`
const code = ["const yo = {a: {b: '123'}}", "const myVar = yo.a['b']"].join(
'\n'
)
const { root } = await exe(code)
expect(root.myVar).toEqual({
const mem = await exe(code)
expect(mem.get('myVar')).toEqual({
type: 'UserVal',
value: '123',
__meta: [
@ -273,81 +274,81 @@ const newVar = myVar + 1`
describe('testing math operators', () => {
it('can sum', async () => {
const code = ['const myVar = 1 + 2'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(3)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(3)
})
it('can subtract', async () => {
const code = ['const myVar = 1 - 2'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(-1)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(-1)
})
it('can multiply', async () => {
const code = ['const myVar = 1 * 2'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(2)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(2)
})
it('can divide', async () => {
const code = ['const myVar = 1 / 2'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(0.5)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(0.5)
})
it('can modulus', async () => {
const code = ['const myVar = 5 % 2'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(1)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(1)
})
it('can do multiple operations', async () => {
const code = ['const myVar = 1 + 2 * 3'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(7)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(7)
})
it('big example with parans', async () => {
const code = ['const myVar = 1 + 2 * (3 - 4) / -5 + 6'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(7.4)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(7.4)
})
it('with identifier', async () => {
const code = ['const yo = 6', 'const myVar = yo / 2'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(3)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(3)
})
it('with lots of testing', async () => {
const code = ['const myVar = 2 * ((2 + 3 ) / 4 + 5)'].join('\n')
const { root } = await exe(code)
expect(root.myVar.value).toBe(12.5)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(12.5)
})
it('with callExpression at start', async () => {
const code = 'const myVar = min(4, 100) + 2'
const { root } = await exe(code)
expect(root.myVar.value).toBe(6)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(6)
})
it('with callExpression at end', async () => {
const code = 'const myVar = 2 + min(4, 100)'
const { root } = await exe(code)
expect(root.myVar.value).toBe(6)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(6)
})
it('with nested callExpression', async () => {
const code = 'const myVar = 2 + min(100, legLen(5, 3))'
const { root } = await exe(code)
expect(root.myVar.value).toBe(6)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(6)
})
it('with unaryExpression', async () => {
const code = 'const myVar = -min(100, 3)'
const { root } = await exe(code)
expect(root.myVar.value).toBe(-3)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(-3)
})
it('with unaryExpression in callExpression', async () => {
const code = 'const myVar = min(-legLen(5, 4), 5)'
const code2 = 'const myVar = min(5 , -legLen(5, 4))'
const { root } = await exe(code)
const { root: root2 } = await exe(code2)
expect(root.myVar.value).toBe(-3)
expect(root.myVar.value).toBe(root2.myVar.value)
const mem = await exe(code)
const mem2 = await exe(code2)
expect(mem.get('myVar')?.value).toBe(-3)
expect(mem.get('myVar')?.value).toBe(mem2.get('myVar')?.value)
})
it('with unaryExpression in ArrayExpression', async () => {
const code = 'const myVar = [1,-legLen(5, 4)]'
const { root } = await exe(code)
expect(root.myVar.value).toEqual([1, -3])
const mem = await exe(code)
expect(mem.get('myVar')?.value).toEqual([1, -3])
})
it('with unaryExpression in ArrayExpression in CallExpression, checking nothing funny happens when used in a sketch', async () => {
const code = [
@ -355,8 +356,8 @@ describe('testing math operators', () => {
' |> startProfileAt([0, 0], %)',
'|> line([-2.21, -legLen(5, min(3, 999))], %)',
].join('\n')
const { root } = await exe(code)
const sketch = root.part001
const mem = await exe(code)
const sketch = mem.get('part001')
// result of `-legLen(5, min(3, 999))` should be -4
const yVal = (sketch as SketchGroup).value?.[0]?.to?.[1]
expect(yVal).toBe(-4)
@ -373,8 +374,8 @@ describe('testing math operators', () => {
`], %)`,
``,
].join('\n')
const { root } = await exe(code)
const sketch = root.part001
const mem = await exe(code)
const sketch = mem.get('part001')
// expect -legLen(segLen('seg01', %), myVar) to equal -4 setting the y value back to 0
expect((sketch as SketchGroup).value?.[1]?.from).toEqual([3, 4])
expect((sketch as SketchGroup).value?.[1]?.to).toEqual([6, 0])
@ -382,18 +383,18 @@ describe('testing math operators', () => {
`-legLen(segLen('seg01', %), myVar)`,
`legLen(segLen('seg01', %), myVar)`
)
const { root: removedUnaryExpRoot } = await exe(removedUnaryExp)
const removedUnaryExpRootSketch = removedUnaryExpRoot.part001
const removedUnaryExpMem = await exe(removedUnaryExp)
const removedUnaryExpMemSketch = removedUnaryExpMem.get('part001')
// without the minus sign, the y value should be 8
expect((removedUnaryExpRootSketch as SketchGroup).value?.[1]?.to).toEqual([
expect((removedUnaryExpMemSketch as SketchGroup).value?.[1]?.to).toEqual([
6, 8,
])
})
it('with nested callExpression and binaryExpression', async () => {
const code = 'const myVar = 2 + min(100, -1 + legLen(5, 3))'
const { root } = await exe(code)
expect(root.myVar.value).toBe(5)
const mem = await exe(code)
expect(mem.get('myVar')?.value).toBe(5)
})
})
@ -421,7 +422,7 @@ const theExtrude = startSketchOn('XY')
async function exe(
code: string,
programMemory: ProgramMemory = { root: {}, return: null }
programMemory: ProgramMemory = ProgramMemory.empty()
) {
const ast = parse(code)

View File

@ -79,20 +79,14 @@ export async function executeAst({
return {
errors: [e],
logs: [],
programMemory: {
root: {},
return: null,
},
programMemory: ProgramMemory.empty(),
}
} else {
console.log(e)
return {
logs: [e],
errors: [],
programMemory: {
root: {},
return: null,
},
programMemory: ProgramMemory.empty(),
}
}
}

View File

@ -983,7 +983,7 @@ export async function deleteFromSelection(
if (err(parent)) {
return
}
const sketchToPreserve = programMemory.root[sketchName] as SketchGroup
const sketchToPreserve = programMemory.get(sketchName) as SketchGroup
console.log('sketchName', sketchName)
// Can't kick off multiple requests at once as getFaceDetails
// is three engine calls in one and they conflict

View File

@ -130,8 +130,14 @@ function moreNodePathFromSourceRange(
const isInRange = _node.start <= start && _node.end >= end
if ((_node.type === 'Identifier' || _node.type === 'Literal') && isInRange)
if (
(_node.type === 'Identifier' ||
_node.type === 'Literal' ||
_node.type === 'TagDeclarator') &&
isInRange
) {
return path
}
if (_node.type === 'CallExpression' && isInRange) {
const { callee, arguments: args } = _node
@ -277,6 +283,15 @@ function moreNodePathFromSourceRange(
}
}
}
return path
}
if (_node.type === 'ReturnStatement' && isInRange) {
const { argument } = _node
if (argument.start <= start && argument.end >= end) {
path.push(['argument', 'ReturnStatement'])
return moreNodePathFromSourceRange(argument, sourceRange, path)
}
return path
}
if (_node.type === 'MemberExpression' && isInRange) {
const { object, property } = _node
@ -459,8 +474,8 @@ export function findAllPreviousVariablesPath(
bodyItems?.forEach?.((item) => {
if (item.type !== 'VariableDeclaration' || item.end > startRange) return
const varName = item.declarations[0].id.name
const varValue = programMemory?.root[varName]
if (typeof varValue?.value !== type) return
const varValue = programMemory?.get(varName)
if (!varValue || typeof varValue?.value !== type) return
variables.push({
key: varName,
value: varValue.value,
@ -640,7 +655,7 @@ export function isLinesParallelAndConstrained(
if (err(_varDec)) return _varDec
const varDec = _varDec.node
const varName = (varDec as VariableDeclaration)?.declarations[0]?.id?.name
const path = programMemory?.root[varName] as SketchGroup
const path = programMemory?.get(varName) as SketchGroup
const _primarySegment = getSketchSegmentFromSourceRange(
path,
primaryLine.range
@ -687,7 +702,7 @@ export function isLinesParallelAndConstrained(
constraintType === 'angle' || constraintLevel === 'full'
// get the previous segment
const prevSegment = (programMemory.root[varName] as SketchGroup).value[
const prevSegment = (programMemory.get(varName) as SketchGroup).value[
secondaryIndex - 1
]
const prevSourceRange = prevSegment.__geoMeta.sourceRange
@ -757,7 +772,7 @@ export function hasExtrudeSketchGroup({
const varDec = varDecMeta.node
if (varDec.type !== 'VariableDeclaration') return false
const varName = varDec.declarations[0].id.name
const varValue = programMemory?.root[varName]
const varValue = programMemory?.get(varName)
return varValue?.type === 'ExtrudeGroup' || varValue?.type === 'SketchGroup'
}

View File

@ -1009,8 +1009,8 @@ export const angledLineOfXLength: SketchLineHelper = {
const { node: varDec } = nodeMeta2
const variableName = varDec.id.name
const sketch = previousProgramMemory?.root?.[variableName]
if (sketch.type !== 'SketchGroup') {
const sketch = previousProgramMemory?.get(variableName)
if (!sketch || sketch.type !== 'SketchGroup') {
return new Error('not a SketchGroup')
}
const angle = createLiteral(roundOff(getAngle(from, to), 0))
@ -1105,8 +1105,8 @@ export const angledLineOfYLength: SketchLineHelper = {
if (err(nodeMeta2)) return nodeMeta2
const { node: varDec } = nodeMeta2
const variableName = varDec.id.name
const sketch = previousProgramMemory?.root?.[variableName]
if (sketch.type !== 'SketchGroup') {
const sketch = previousProgramMemory?.get(variableName)
if (!sketch || sketch.type !== 'SketchGroup') {
return new Error('not a SketchGroup')
}
@ -1443,7 +1443,7 @@ export const angledLineThatIntersects: SketchLineHelper = {
const { node: varDec } = nodeMeta2
const varName = varDec.declarations[0].id.name
const sketchGroup = previousProgramMemory.root[varName] as SketchGroup
const sketchGroup = previousProgramMemory.get(varName) as SketchGroup
const intersectPath = sketchGroup.value.find(
({ tag }: Path) => tag && tag.value === intersectTagName
)

View File

@ -363,7 +363,7 @@ const part001 = startSketchOn('XY')
const programMemory = await enginelessExecutor(parse(code))
const index = code.indexOf('// normal-segment') - 7
const _segment = getSketchSegmentFromSourceRange(
programMemory.root['part001'] as SketchGroup,
programMemory.get('part001') as SketchGroup,
[index, index]
)
if (err(_segment)) throw _segment
@ -379,7 +379,7 @@ const part001 = startSketchOn('XY')
const programMemory = await enginelessExecutor(parse(code))
const index = code.indexOf('// segment-in-start') - 7
const _segment = getSketchSegmentFromSourceRange(
programMemory.root['part001'] as SketchGroup,
programMemory.get('part001') as SketchGroup,
[index, index]
)
if (err(_segment)) throw _segment

View File

@ -1636,8 +1636,8 @@ export function transformAstSketchLines({
})
const varName = varDec.node.id.name
let sketchGroup = programMemory.root?.[varName]
if (sketchGroup.type === 'ExtrudeGroup') {
let sketchGroup = programMemory.get(varName)
if (sketchGroup?.type === 'ExtrudeGroup') {
sketchGroup = sketchGroup.sketchGroup
}
if (!sketchGroup || sketchGroup.type !== 'SketchGroup')

View File

@ -17,9 +17,9 @@ describe('testing angledLineThatIntersects', () => {
offset: ${offset},
}, %, "yo2")
const intersect = segEndX('yo2', part001)`
const { root } = await enginelessExecutor(parse(code('-1')))
expect(root.intersect.value).toBe(1 + Math.sqrt(2))
const { root: noOffset } = await enginelessExecutor(parse(code('0')))
expect(noOffset.intersect.value).toBeCloseTo(1)
const mem = await enginelessExecutor(parse(code('-1')))
expect(mem.get('intersect')?.value).toBe(1 + Math.sqrt(2))
const noOffset = await enginelessExecutor(parse(code('0')))
expect(noOffset.get('intersect')?.value).toBeCloseTo(1)
})
})

View File

@ -143,14 +143,200 @@ interface Memory {
[key: string]: MemoryItem
}
export interface ProgramMemory {
root: Memory
type EnvironmentRef = number
const ROOT_ENVIRONMENT_REF: EnvironmentRef = 0
interface Environment {
bindings: Memory
parent: EnvironmentRef | null
}
function emptyEnvironment(): Environment {
return { bindings: {}, parent: null }
}
interface RawProgramMemory {
environments: Environment[]
currentEnv: EnvironmentRef
return: ProgramReturn | null
}
/**
* This duplicates logic in Rust. The hope is to keep ProgramMemory internals
* isolated from the rest of the TypeScript code so that we can move it to Rust
* in the future.
*/
export class ProgramMemory {
private environments: Environment[]
private currentEnv: EnvironmentRef
private return: ProgramReturn | null
/**
* Empty memory doesn't include prelude definitions.
*/
static empty(): ProgramMemory {
return new ProgramMemory()
}
static fromRaw(raw: RawProgramMemory): ProgramMemory {
return new ProgramMemory(raw.environments, raw.currentEnv, raw.return)
}
constructor(
environments: Environment[] = [emptyEnvironment()],
currentEnv: EnvironmentRef = ROOT_ENVIRONMENT_REF,
returnVal: ProgramReturn | null = null
) {
this.environments = environments
this.currentEnv = currentEnv
this.return = returnVal
}
/**
* Returns a deep copy.
*/
clone(): ProgramMemory {
return ProgramMemory.fromRaw(JSON.parse(JSON.stringify(this.toRaw())))
}
has(name: string): boolean {
let envRef = this.currentEnv
while (true) {
const env = this.environments[envRef]
if (env.bindings.hasOwnProperty(name)) {
return true
}
if (!env.parent) {
break
}
envRef = env.parent
}
return false
}
get(name: string): MemoryItem | null {
let envRef = this.currentEnv
while (true) {
const env = this.environments[envRef]
if (env.bindings.hasOwnProperty(name)) {
return env.bindings[name]
}
if (!env.parent) {
break
}
envRef = env.parent
}
return null
}
set(name: string, value: MemoryItem): Error | null {
if (this.environments.length === 0) {
return new Error('No environment to set memory in')
}
const env = this.environments[this.currentEnv]
env.bindings[name] = value
return null
}
/**
* Returns a new ProgramMemory with only `MemoryItem`s that pass the
* predicate. Values are deep copied.
*
* Note: Return value of the returned ProgramMemory is always null.
*/
filterVariables(
keepPrelude: boolean,
predicate: (value: MemoryItem) => boolean
): ProgramMemory | Error {
const environments: Environment[] = []
for (const [i, env] of this.environments.entries()) {
let bindings: Memory
if (i === ROOT_ENVIRONMENT_REF && keepPrelude) {
// Get prelude definitions. Create these first so that they're always
// first in iteration order.
const memoryOrError = programMemoryInit()
if (err(memoryOrError)) return memoryOrError
bindings = memoryOrError.environments[0].bindings
} else {
bindings = emptyEnvironment().bindings
}
for (const [name, value] of Object.entries(env.bindings)) {
// Check the predicate.
if (!predicate(value)) {
continue
}
// Deep copy.
bindings[name] = JSON.parse(JSON.stringify(value))
}
environments.push({ bindings, parent: env.parent })
}
return new ProgramMemory(environments, this.currentEnv, null)
}
numEnvironments(): number {
return this.environments.length
}
numVariables(envRef: EnvironmentRef): number {
return Object.keys(this.environments[envRef].bindings).length
}
/**
* Returns all variable entries in memory that are visible, in a flat
* structure. If variables are shadowed, they're not visible, and therefore,
* not included.
*
* This should only be used to display in the MemoryPane UI.
*/
visibleEntries(): Map<string, MemoryItem> {
const map = new Map<string, MemoryItem>()
let envRef = this.currentEnv
while (true) {
const env = this.environments[envRef]
for (const [name, value] of Object.entries(env.bindings)) {
// Don't include shadowed variables.
if (!map.has(name)) {
map.set(name, value)
}
}
if (!env.parent) {
break
}
envRef = env.parent
}
return map
}
/**
* Returns true if any visible variables are a SketchGroup or ExtrudeGroup.
*/
hasSketchOrExtrudeGroup(): boolean {
for (const node of this.visibleEntries().values()) {
if (node.type === 'ExtrudeGroup' || node.type === 'SketchGroup') {
return true
}
}
return false
}
/**
* Return the representation that can be serialized to JSON. This should only
* be used within this module.
*/
toRaw(): RawProgramMemory {
return {
environments: this.environments,
currentEnv: this.currentEnv,
return: this.return,
}
}
}
export const executor = async (
node: Program,
programMemory: ProgramMemory | Error = { root: {}, return: null },
programMemory: ProgramMemory | Error = ProgramMemory.empty(),
engineCommandManager: EngineCommandManager,
isMock: boolean = false
): Promise<ProgramMemory> => {
@ -171,7 +357,7 @@ export const executor = async (
export const _executor = async (
node: Program,
programMemory: ProgramMemory | Error = { root: {}, return: null },
programMemory: ProgramMemory | Error = ProgramMemory.empty(),
engineCommandManager: EngineCommandManager,
isMock: boolean
): Promise<ProgramMemory> => {
@ -186,15 +372,15 @@ export const _executor = async (
baseUnit =
(await getSettingsState)()?.modeling.defaultUnit.current || 'mm'
}
const memory: ProgramMemory = await execute_wasm(
const memory: RawProgramMemory = await execute_wasm(
JSON.stringify(node),
JSON.stringify(programMemory),
JSON.stringify(programMemory.toRaw()),
baseUnit,
engineCommandManager,
fileSystemManager,
isMock
)
return memory
return ProgramMemory.fromRaw(memory)
} catch (e: any) {
console.log(e)
const parsed: RustKclError = JSON.parse(e.toString())
@ -329,10 +515,17 @@ export function getTangentialArcToInfo({
}
}
/**
* Returns new ProgramMemory with prelude definitions.
*/
export function programMemoryInit(): ProgramMemory | Error {
try {
const memory: ProgramMemory = program_memory_init()
return memory
const memory: RawProgramMemory = program_memory_init()
return new ProgramMemory(
memory.environments,
memory.currentEnv,
memory.return
)
} catch (e: any) {
console.log(e)
const parsed: RustKclError = JSON.parse(e.toString())

View File

@ -75,7 +75,7 @@ class MockEngineCommandManager {
export async function enginelessExecutor(
ast: Program | Error,
pm: ProgramMemory | Error = { root: {}, return: null }
pm: ProgramMemory | Error = ProgramMemory.empty()
): Promise<ProgramMemory> {
if (err(ast)) return Promise.reject(ast)
if (err(pm)) return Promise.reject(pm)
@ -93,7 +93,7 @@ export async function enginelessExecutor(
export async function executor(
ast: Program,
pm: ProgramMemory = { root: {}, return: null }
pm: ProgramMemory = ProgramMemory.empty()
): Promise<ProgramMemory> {
const engineCommandManager = new EngineCommandManager()
engineCommandManager.start({

View File

@ -3,7 +3,7 @@ import { kclManager, engineCommandManager } from 'lib/singletons'
import { useKclContext } from 'lang/KclProvider'
import { findUniqueName } from 'lang/modifyAst'
import { PrevVariable, findAllPreviousVariables } from 'lang/queryAst'
import { Value, parse } from 'lang/wasm'
import { ProgramMemory, Value, parse } from 'lang/wasm'
import { useEffect, useRef, useState } from 'react'
import { executeAst } from 'lang/langHelpers'
import { err, trap } from 'lib/trap'
@ -60,9 +60,8 @@ export function useCalculateKclExpression({
}, [])
useEffect(() => {
const allVarNames = Object.keys(programMemory.root)
if (
allVarNames.includes(newVariableName) ||
programMemory.has(newVariableName) ||
newVariableName === '' ||
!isValidVariableName(newVariableName)
) {
@ -89,17 +88,20 @@ export function useCalculateKclExpression({
if (err(ast)) return
if (trap(ast, { suppress: true })) return
const _programMem: any = { root: {}, return: null }
availableVarInfo.variables.forEach(({ key, value }) => {
_programMem.root[key] = { type: 'userVal', value, __meta: [] }
const _programMem: ProgramMemory = ProgramMemory.empty()
for (const { key, value } of availableVarInfo.variables) {
const error = _programMem.set(key, {
type: 'UserVal',
value,
__meta: [],
})
if (trap(error, { suppress: true })) return
}
const { programMemory } = await executeAst({
ast,
engineCommandManager,
useFakeExecutor: true,
programMemoryOverride: JSON.parse(
JSON.stringify(kclManager.programMemory)
),
programMemoryOverride: kclManager.programMemory.clone(),
})
const resultDeclaration = ast.body.find(
(a) =>
@ -109,7 +111,7 @@ export function useCalculateKclExpression({
const init =
resultDeclaration?.type === 'VariableDeclaration' &&
resultDeclaration?.declarations?.[0]?.init
const result = programMemory?.root?.__result__?.value
const result = programMemory?.get('__result__')?.value
setCalcResult(typeof result === 'number' ? String(result) : 'NAN')
init && setValueNode(init)
}

View File

@ -1139,8 +1139,8 @@ export const modelingMachine = createMachine(
)
if (err(varDecNode)) return
const sketchVar = varDecNode.node.declarations[0].id.name
const sketchGroup = kclManager.programMemory.root[sketchVar]
if (sketchGroup.type !== 'SketchGroup') return
const sketchGroup = kclManager.programMemory.get(sketchVar)
if (sketchGroup?.type !== 'SketchGroup') return
const idArtifact = engineCommandManager.artifactMap[sketchGroup.id]
if (idArtifact.commandType !== 'start_path') return
const extrusionArtifactId = (idArtifact as any)?.extrusions?.[0]

View File

@ -1388,7 +1388,7 @@ impl CallExpression {
}
FunctionKind::UserDefined => {
let func = memory.get(&fn_name, self.into())?;
let result = func.call_fn(fn_args, memory.clone(), ctx.clone()).await.map_err(|e| {
let result = func.call_fn(fn_args, ctx.clone()).await.map_err(|e| {
// Add the call expression to the source ranges.
e.add_source_ranges(vec![self.into()])
})?;
@ -2880,6 +2880,30 @@ impl BinaryExpression {
pipe_info: &PipeInfo,
ctx: &ExecutorContext,
) -> Result<MemoryItem, KclError> {
// First check whether this is a short-circuiting logical operator.
if self.operator == BinaryOperator::LogicalOr {
let left_json_value = self.left.get_result(memory, pipe_info, ctx).await?.get_json_value()?;
let left = json_to_bool(&left_json_value);
if left {
// Short-circuit.
return Ok(MemoryItem::UserVal(UserVal {
value: serde_json::Value::Bool(left),
meta: vec![Metadata {
source_range: self.into(),
}],
}));
}
let right_json_value = self.right.get_result(memory, pipe_info, ctx).await?.get_json_value()?;
let right = json_to_bool(&right_json_value);
return Ok(MemoryItem::UserVal(UserVal {
value: serde_json::Value::Bool(right),
meta: vec![Metadata {
source_range: self.into(),
}],
}));
}
let left_json_value = self.left.get_result(memory, pipe_info, ctx).await?.get_json_value()?;
let right_json_value = self.right.get_result(memory, pipe_info, ctx).await?.get_json_value()?;
@ -2909,6 +2933,9 @@ impl BinaryExpression {
BinaryOperator::Div => (left / right).into(),
BinaryOperator::Mod => (left % right).into(),
BinaryOperator::Pow => (left.powf(right)).into(),
BinaryOperator::LogicalOr => {
unreachable!("LogicalOr should have been handled above")
}
};
Ok(MemoryItem::UserVal(UserVal {
@ -2950,6 +2977,27 @@ pub fn parse_json_value_as_string(j: &serde_json::Value) -> Option<String> {
}
}
pub fn json_to_bool(j: &serde_json::Value) -> bool {
match j {
JValue::Null => false,
JValue::Bool(b) => *b,
JValue::Number(n) => {
if let Some(n) = n.as_u64() {
n != 0
} else if let Some(n) = n.as_i64() {
n != 0
} else if let Some(x) = n.as_f64() {
x != 0.0 && !x.is_nan()
} else {
false
}
}
JValue::String(s) => !s.is_empty(),
JValue::Array(a) => !a.is_empty(),
JValue::Object(_) => false,
}
}
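To make the truthiness rules concrete: json_to_bool treats null, 0, NaN, empty strings, empty arrays, and every object as false, the new || short-circuits when the left operand is already truthy, and ! (handled in the UnaryExpression change below) negates the same truthiness. A minimal TypeScript sketch of those rules, illustrative only and independent of the Rust implementation:

// Truthiness rules mirroring json_to_bool (sketch, not the real implementation).
type Json = null | boolean | number | string | Json[] | { [k: string]: Json }

function jsonToBool(j: Json): boolean {
  if (j === null) return false
  if (typeof j === 'boolean') return j
  if (typeof j === 'number') return j !== 0 && !Number.isNaN(j)
  if (typeof j === 'string') return j.length > 0
  if (Array.isArray(j)) return j.length > 0
  return false // objects are always falsy, matching the JValue::Object arm
}

// `a || b`: the right side is evaluated only when the left is falsy.
function logicalOr(left: Json, lazyRight: () => Json): boolean {
  if (jsonToBool(left)) return true // short-circuit
  return jsonToBool(lazyRight())
}

console.log(logicalOr(0, () => 'non-empty')) // true
console.log(jsonToBool({}))                  // false
console.log(!jsonToBool(''))                 // true, like `!""` with UnaryOperator::Not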
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, FromStr, Display, Bake)]
#[databake(path = kcl_lib::ast::types)]
#[ts(export)]
@ -2980,6 +3028,10 @@ pub enum BinaryOperator {
#[serde(rename = "^")]
#[display("^")]
Pow,
/// Logical OR.
#[serde(rename = "||")]
#[display("||")]
LogicalOr,
}
/// Mathematical associativity.
@ -3008,6 +3060,7 @@ impl BinaryOperator {
BinaryOperator::Div => *b"div",
BinaryOperator::Mod => *b"mod",
BinaryOperator::Pow => *b"pow",
BinaryOperator::LogicalOr => *b"lor",
}
}
@ -3018,6 +3071,7 @@ impl BinaryOperator {
BinaryOperator::Add | BinaryOperator::Sub => 11,
BinaryOperator::Mul | BinaryOperator::Div | BinaryOperator::Mod => 12,
BinaryOperator::Pow => 6,
BinaryOperator::LogicalOr => 3,
}
}
@ -3025,7 +3079,7 @@ impl BinaryOperator {
/// Taken from <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Operator_precedence#table>
pub fn associativity(&self) -> Associativity {
match self {
Self::Add | Self::Sub | Self::Mul | Self::Div | Self::Mod => Associativity::Left,
Self::Add | Self::Sub | Self::Mul | Self::Div | Self::Mod | Self::LogicalOr => Associativity::Left,
Self::Pow => Associativity::Right,
}
}
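With precedence 3, || binds more loosely than addition (11), multiplication (12), and exponentiation (6), and it associates to the left, so 1 + 2 || 0 groups as (1 + 2) || 0 and a || b || c as (a || b) || c. A tiny illustrative check in TypeScript, not tied to the real parser:

// Precedence values copied from the match above; higher binds tighter.
const precedence: Record<string, number> = {
  '||': 3, '^': 6, '+': 11, '-': 11, '*': 12, '/': 12, '%': 12,
}

function bindsTighter(a: string, b: string): boolean {
  return precedence[a] > precedence[b]
}

console.log(bindsTighter('+', '||')) // true: 1 + 2 || 0 parses as (1 + 2) || 0
console.log(bindsTighter('||', '^')) // false: exponentiation groups first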
@ -3089,6 +3143,21 @@ impl UnaryExpression {
pipe_info: &PipeInfo,
ctx: &ExecutorContext,
) -> Result<MemoryItem, KclError> {
if self.operator == UnaryOperator::Not {
let value = self
.argument
.get_result(memory, pipe_info, ctx)
.await?
.get_json_value()?;
let negated = !json_to_bool(&value);
return Ok(MemoryItem::UserVal(UserVal {
value: serde_json::Value::Bool(negated),
meta: vec![Metadata {
source_range: self.into(),
}],
}));
}
let num = parse_json_number_as_f64(
&self
.argument

View File

@ -23,7 +23,8 @@ use crate::{
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ProgramMemory {
pub root: HashMap<String, MemoryItem>,
pub environments: Vec<Environment>,
pub current_env: EnvironmentRef,
#[serde(rename = "return")]
pub return_: Option<ProgramReturn>,
}
@ -31,7 +32,105 @@ pub struct ProgramMemory {
impl ProgramMemory {
pub fn new() -> Self {
Self {
root: HashMap::from([
environments: vec![Environment::root()],
current_env: EnvironmentRef::root(),
return_: None,
}
}
pub fn new_env_for_call(&mut self, parent: EnvironmentRef) -> EnvironmentRef {
let new_env_ref = EnvironmentRef(self.environments.len());
let new_env = Environment::new(parent);
self.environments.push(new_env);
new_env_ref
}
/// Add to the program memory in the current scope.
pub fn add(&mut self, key: &str, value: MemoryItem, source_range: SourceRange) -> Result<(), KclError> {
if self.environments[self.current_env.index()].contains_key(key) {
return Err(KclError::ValueAlreadyDefined(KclErrorDetails {
message: format!("Cannot redefine `{}`", key),
source_ranges: vec![source_range],
}));
}
self.environments[self.current_env.index()].insert(key.to_string(), value);
Ok(())
}
/// Get a value from the program memory.
/// Return Err if not found.
pub fn get(&self, var: &str, source_range: SourceRange) -> Result<&MemoryItem, KclError> {
let mut env_ref = self.current_env;
loop {
let env = &self.environments[env_ref.index()];
if let Some(item) = env.bindings.get(var) {
return Ok(item);
}
if let Some(parent) = env.parent {
env_ref = parent;
} else {
break;
}
}
Err(KclError::UndefinedValue(KclErrorDetails {
message: format!("memory item key `{}` is not defined", var),
source_ranges: vec![source_range],
}))
}
/// Find all extrude groups in the memory that are on a specific sketch group id.
/// This does not look inside closures. But as long as we do not allow
/// mutation of variables in KCL, closure memory should be a subset of this.
pub fn find_extrude_groups_on_sketch_group(&self, sketch_group_id: uuid::Uuid) -> Vec<Box<ExtrudeGroup>> {
self.environments
.iter()
.flat_map(|env| {
env.bindings
.values()
.filter_map(|item| match item {
MemoryItem::ExtrudeGroup(eg) if eg.sketch_group.id == sketch_group_id => Some(eg.clone()),
_ => None,
})
.collect::<Vec<_>>()
})
.collect()
}
}
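Taken together, add, get, and new_env_for_call give KCL lexical scoping: add refuses to redefine a name in the current environment, a call pushes a fresh environment whose parent is the closure's, so the same name can shadow an outer binding, and get still reaches outer names by walking the parent chain. A standalone TypeScript model of that behaviour (it mirrors, but does not reuse, the Rust types):

// add() only touches the current frame, get() walks parents, and a call frame
// can shadow a name without tripping the "Cannot redefine" check.
type Item = { value: unknown }
type Env = { bindings: Map<string, Item>; parent: number | null }

class Memory {
  envs: Env[] = [{ bindings: new Map(), parent: null }] // root environment
  current = 0

  newEnvForCall(parent: number): number {
    this.envs.push({ bindings: new Map(), parent })
    return this.envs.length - 1
  }

  add(key: string, value: Item): void {
    const env = this.envs[this.current]
    if (env.bindings.has(key)) throw new Error(`Cannot redefine \`${key}\``)
    env.bindings.set(key, value)
  }

  get(key: string): Item | undefined {
    let ref: number | null = this.current
    while (ref !== null) {
      const env = this.envs[ref]
      const hit = env.bindings.get(key)
      if (hit !== undefined) return hit
      ref = env.parent
    }
    return undefined
  }
}

const mem = new Memory()
mem.add('x', { value: 1 })
mem.current = mem.newEnvForCall(0) // enter a call frame
mem.add('x', { value: 2 })         // shadows the outer x; not a redefinition
console.log(mem.get('x')?.value)   // 2
mem.current = 0                    // back in the root scope
console.log(mem.get('x')?.value)   // 1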
impl Default for ProgramMemory {
fn default() -> Self {
Self::new()
}
}
/// An index pointing to an environment.
#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
pub struct EnvironmentRef(usize);
impl EnvironmentRef {
pub fn root() -> Self {
Self(0)
}
pub fn index(&self) -> usize {
self.0
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
pub struct Environment {
bindings: HashMap<String, MemoryItem>,
parent: Option<EnvironmentRef>,
}
impl Environment {
pub fn root() -> Self {
Self {
// Prelude
bindings: HashMap::from([
(
"ZERO".to_string(),
MemoryItem::UserVal(UserVal {
@ -61,28 +160,19 @@ impl ProgramMemory {
}),
),
]),
return_: None,
parent: None,
}
}
/// Add to the program memory.
pub fn add(&mut self, key: &str, value: MemoryItem, source_range: SourceRange) -> Result<(), KclError> {
if self.root.contains_key(key) {
return Err(KclError::ValueAlreadyDefined(KclErrorDetails {
message: format!("Cannot redefine `{}`", key),
source_ranges: vec![source_range],
}));
pub fn new(parent: EnvironmentRef) -> Self {
Self {
bindings: HashMap::new(),
parent: Some(parent),
}
}
self.root.insert(key.to_string(), value);
Ok(())
}
/// Get a value from the program memory.
/// Return Err if not found.
pub fn get(&self, key: &str, source_range: SourceRange) -> Result<&MemoryItem, KclError> {
self.root.get(key).ok_or_else(|| {
self.bindings.get(key).ok_or_else(|| {
KclError::UndefinedValue(KclErrorDetails {
message: format!("memory item key `{}` is not defined", key),
source_ranges: vec![source_range],
@ -90,21 +180,12 @@ impl ProgramMemory {
})
}
/// Find all extrude groups in the memory that are on a specific sketch group id.
pub fn find_extrude_groups_on_sketch_group(&self, sketch_group_id: uuid::Uuid) -> Vec<Box<ExtrudeGroup>> {
self.root
.values()
.filter_map(|item| match item {
MemoryItem::ExtrudeGroup(eg) if eg.sketch_group.id == sketch_group_id => Some(eg.clone()),
_ => None,
})
.collect()
}
pub fn insert(&mut self, key: String, value: MemoryItem) {
self.bindings.insert(key, value);
}
impl Default for ProgramMemory {
fn default() -> Self {
Self::new()
pub fn contains_key(&self, key: &str) -> bool {
self.bindings.contains_key(key)
}
}
@ -161,6 +242,7 @@ pub enum MemoryItem {
#[serde(skip)]
func: Option<MemoryFunction>,
expression: Box<FunctionExpression>,
memory: Box<ProgramMemory>,
#[serde(rename = "__meta")]
meta: Vec<Metadata>,
},
@ -620,7 +702,7 @@ impl MemoryItem {
.map(Some)
}
fn as_user_val(&self) -> Option<&UserVal> {
pub fn as_user_val(&self) -> Option<&UserVal> {
if let MemoryItem::UserVal(x) = self {
Some(x)
} else {
@ -642,27 +724,21 @@ impl MemoryItem {
}
/// If this value is of type function, return it.
pub fn get_function(&self, source_ranges: Vec<SourceRange>) -> Result<FnAsArg<'_>, KclError> {
pub fn get_function(&self) -> Option<FnAsArg<'_>> {
let MemoryItem::Function {
func,
expression,
memory,
meta: _,
} = &self
else {
return Err(KclError::Semantic(KclErrorDetails {
message: "not an in-memory function".to_string(),
source_ranges,
}));
return None;
};
let func = func.as_ref().ok_or_else(|| {
KclError::Semantic(KclErrorDetails {
message: format!("Not an in-memory function: {:?}", expression),
source_ranges,
})
})?;
Ok(FnAsArg {
let func = func.as_ref()?;
Some(FnAsArg {
func,
expr: expression.to_owned(),
memory: memory.to_owned(),
})
}
@ -736,10 +812,15 @@ impl MemoryItem {
pub async fn call_fn(
&self,
args: Vec<MemoryItem>,
memory: ProgramMemory,
ctx: ExecutorContext,
) -> Result<Option<ProgramReturn>, KclError> {
let MemoryItem::Function { func, expression, meta } = &self else {
let MemoryItem::Function {
func,
expression,
memory: closure_memory,
meta,
} = &self
else {
return Err(KclError::Semantic(KclErrorDetails {
message: "not an in-memory function".to_string(),
source_ranges: vec![],
@ -751,7 +832,14 @@ impl MemoryItem {
source_ranges: vec![],
}));
};
func(args, memory, expression.clone(), meta.clone(), ctx).await
func(
args,
closure_memory.as_ref().clone(),
expression.clone(),
meta.clone(),
ctx,
)
.await
}
}
@ -1560,16 +1648,13 @@ impl ExecutorContext {
memory.return_ = result.return_;
}
FunctionKind::UserDefined => {
if let Some(func) = memory.clone().root.get(&fn_name) {
let result = func.call_fn(args.clone(), memory.clone(), self.clone()).await?;
// TODO: Why do we change the source range to
// the call expression instead of keeping the
// range of the callee?
let func = memory.get(&fn_name, call_expr.into())?;
let result = func.call_fn(args.clone(), self.clone()).await?;
memory.return_ = result;
} else {
return Err(KclError::Semantic(KclErrorDetails {
message: format!("No such name {} defined", fn_name),
source_ranges: vec![call_expr.into()],
}));
}
}
}
}
@ -1680,7 +1765,15 @@ impl ExecutorContext {
_metadata: Vec<Metadata>,
ctx: ExecutorContext| {
Box::pin(async move {
let mut fn_memory = assign_args_to_params(&function_expression, args, memory.clone())?;
// Create a new environment to execute the function
// body in so that local variables shadow variables
// in the parent scope. The new environment's
// parent should be the environment of the closure.
let mut body_memory = memory.clone();
let closure_env = memory.current_env;
let body_env = body_memory.new_env_for_call(closure_env);
body_memory.current_env = body_env;
let mut fn_memory = assign_args_to_params(&function_expression, args, body_memory)?;
let result = ctx
.inner_execute(&function_expression.body, &mut fn_memory, BodyType::Block)
@ -1690,10 +1783,14 @@ impl ExecutorContext {
})
},
);
// Cloning memory here is crucial for semantics so that we close
// over variables. Variables defined lexically later shouldn't
// be available to the function body.
MemoryItem::Function {
expression: function_expression.clone(),
meta: vec![metadata.to_owned()],
func: Some(mem_func),
memory: Box::new(memory.clone()),
}
}
Value::CallExpression(call_expression) => call_expression.execute(memory, pipe_info, self).await?,
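The two comments above describe the closure model: a function value snapshots the memory it was defined in, and each call then runs in a new environment whose parent is that snapshot, so top-level names defined after the function are invisible to its body (exactly what test_function_cannot_access_future_definitions asserts below). A small capture-by-snapshot sketch in TypeScript, independent of the Rust executor:

// Capture-by-snapshot: the function keeps the bindings visible at its
// definition site; names defined afterwards are not reachable from the body.
type Bindings = Map<string, number>

function defineFn(defSiteMemory: Bindings, body: (env: Bindings) => number): () => number {
  const snapshot = new Map(defSiteMemory) // like Box::new(memory.clone()) above
  return () => {
    // Simplification: copy the snapshot instead of chaining a child
    // environment onto it; reads behave the same in this sketch.
    const callFrame = new Map(snapshot)
    return body(callFrame)
  }
}

const topLevel: Bindings = new Map([['y', 3]])
const readX = defineFn(topLevel, (env) => env.get('x') ?? NaN)
topLevel.set('x', 5) // defined lexically after readX was created

console.log(readX())                                            // NaN: x is not in the snapshot
console.log(defineFn(topLevel, (env) => env.get('y') ?? NaN)()) // 3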
@ -1796,7 +1893,8 @@ fn assign_args_to_params(
return Err(err_wrong_number_args);
}
// Add the arguments to the memory.
// Add the arguments to the memory. A new call frame should have already
// been created.
for (index, param) in function_expression.params.iter().enumerate() {
if let Some(arg) = args.get(index) {
// Argument was provided.
@ -1862,11 +1960,19 @@ const newVar = myVar + 1"#;
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(5),
memory.root.get("myVar").unwrap().get_json_value().unwrap()
memory
.get("myVar", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
assert_eq!(
serde_json::json!(6.0),
memory.root.get("newVar").unwrap().get_json_value().unwrap()
memory
.get("newVar", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -1891,13 +1997,21 @@ const intersect = segEndX('yo2', part001)"#,
let memory = parse_execute(&ast_fn("-1")).await.unwrap();
assert_eq!(
serde_json::json!(1.0 + 2.0f64.sqrt()),
memory.root.get("intersect").unwrap().get_json_value().unwrap()
memory
.get("intersect", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
let memory = parse_execute(&ast_fn("0")).await.unwrap();
assert_eq!(
serde_json::json!(1.0000000000000002),
memory.root.get("intersect").unwrap().get_json_value().unwrap()
memory
.get("intersect", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -2215,13 +2329,252 @@ const thisBox = box([[0,0], 6, 10, 3])
parse_execute(ast).await.unwrap();
}
#[tokio::test(flavor = "multi_thread")]
async fn test_function_cannot_access_future_definitions() {
let ast = r#"
fn returnX = () => {
// x shouldn't be defined yet.
return x
}
const x = 5
const answer = returnX()"#;
let result = parse_execute(ast).await;
let err = result.unwrap_err().downcast::<KclError>().unwrap();
assert_eq!(
err,
KclError::UndefinedValue(KclErrorDetails {
message: "memory item key `x` is not defined".to_owned(),
source_ranges: vec![SourceRange([64, 65]), SourceRange([97, 106])],
}),
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_pattern_transform_function_cannot_access_future_definitions() {
let ast = r#"
fn transform = (replicaId) => {
// x shouldn't be defined yet.
let scale = x
return {
translate: [0, 0, replicaId * 10],
scale: [scale, 1, 0],
}
}
fn layer = () => {
return startSketchOn("XY")
|> circle([0, 0], 1, %, 'tag1')
|> extrude(10, %)
}
const x = 5
// The 10 layers are replicas of each other, with a transform applied to each.
let shape = layer() |> patternTransform(10, transform, %)
"#;
let result = parse_execute(ast).await;
let err = result.unwrap_err().downcast::<KclError>().unwrap();
assert_eq!(
err,
KclError::UndefinedValue(KclErrorDetails {
message: "memory item key `x` is not defined".to_owned(),
source_ranges: vec![SourceRange([80, 81])],
}),
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_execute_function_with_parameter_redefined_outside() {
let ast = r#"
fn myIdentity = (x) => {
return x
}
const x = 33
const two = myIdentity(2)"#;
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(2),
memory
.get("two", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
assert_eq!(
serde_json::json!(33),
memory
.get("x", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_execute_function_referencing_variable_in_parent_scope() {
let ast = r#"
const x = 22
const y = 3
fn add = (x) => {
return x + y
}
const answer = add(2)"#;
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(5.0),
memory
.get("answer", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
assert_eq!(
serde_json::json!(22),
memory
.get("x", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_execute_function_redefining_variable_in_parent_scope() {
let ast = r#"
const x = 1
fn foo = () => {
const x = 2
return x
}
const answer = foo()"#;
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(2),
memory
.get("answer", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
assert_eq!(
serde_json::json!(1),
memory
.get("x", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_execute_pattern_transform_function_redefining_variable_in_parent_scope() {
let ast = r#"
const scale = 100
fn transform = (replicaId) => {
// Redefine same variable as in parent scope.
const scale = 2
return {
translate: [0, 0, replicaId * 10],
scale: [scale, 1, 0],
}
}
fn layer = () => {
return startSketchOn("XY")
|> circle([0, 0], 1, %, 'tag1')
|> extrude(10, %)
}
// The 10 layers are replicas of each other, with a transform applied to each.
let shape = layer() |> patternTransform(10, transform, %)"#;
let memory = parse_execute(ast).await.unwrap();
// TODO: Assert that scale 2 was used.
assert_eq!(
serde_json::json!(100),
memory
.get("scale", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_execute_ycombinator_is_even() {
let ast = r#"
// Heavily inspired by: https://raganwald.com/2018/09/10/why-y.html
fn why = (f) => {
fn inner = (maker) => {
fn inner2 = (x) => {
return f(maker(maker), x)
}
return inner2
}
return inner(
(maker) => {
fn inner2 = (x) => {
return f(maker(maker), x)
}
return inner2
}
)
}
fn innerIsEven = (self, n) => {
return !n || !self(n - 1)
}
const isEven = why(innerIsEven)
const two = isEven(2)
const three = isEven(3)
"#;
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(true),
memory
.get("two", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
assert_eq!(
serde_json::json!(false),
memory
.get("three", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_math_execute_with_functions() {
let ast = r#"const myVar = 2 + min(100, -1 + legLen(5, 3))"#;
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(5.0),
memory.root.get("myVar").unwrap().get_json_value().unwrap()
memory
.get("myVar", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -2231,7 +2584,11 @@ const thisBox = box([[0,0], 6, 10, 3])
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(7.4),
memory.root.get("myVar").unwrap().get_json_value().unwrap()
memory
.get("myVar", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -2241,7 +2598,11 @@ const thisBox = box([[0,0], 6, 10, 3])
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(1.0),
memory.root.get("myVar").unwrap().get_json_value().unwrap()
memory
.get("myVar", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -2251,7 +2612,11 @@ const thisBox = box([[0,0], 6, 10, 3])
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(std::f64::consts::TAU),
memory.root.get("myVar").unwrap().get_json_value().unwrap()
memory
.get("myVar", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -2261,7 +2626,11 @@ const thisBox = box([[0,0], 6, 10, 3])
let memory = parse_execute(ast).await.unwrap();
assert_eq!(
serde_json::json!(7.4),
memory.root.get("thing").unwrap().get_json_value().unwrap()
memory
.get("thing", SourceRange::default())
.unwrap()
.get_json_value()
.unwrap()
);
}
@ -2391,7 +2760,9 @@ const bracket = startSketchOn('XY')
fn additional_program_memory(items: &[(String, MemoryItem)]) -> ProgramMemory {
let mut program_memory = ProgramMemory::new();
for (name, item) in items {
program_memory.root.insert(name.to_string(), item.clone());
program_memory
.add(name.as_str(), item.clone(), SourceRange::default())
.unwrap();
}
program_memory
}

View File

@ -299,6 +299,7 @@ fn binary_operator(i: TokenSlice) -> PResult<BinaryOperator> {
"*" => BinaryOperator::Mul,
"%" => BinaryOperator::Mod,
"^" => BinaryOperator::Pow,
"||" => BinaryOperator::LogicalOr,
_ => {
return Err(KclError::Syntax(KclErrorDetails {
source_ranges: token.as_source_ranges(),
@ -1136,11 +1137,11 @@ fn unary_expression(i: TokenSlice) -> PResult<UnaryExpression> {
let (operator, op_token) = any
.try_map(|token: Token| match token.token_type {
TokenType::Operator if token.value == "-" => Ok((UnaryOperator::Neg, token)),
// TODO: negation. Original parser doesn't support `not` yet.
TokenType::Operator => Err(KclError::Syntax(KclErrorDetails {
source_ranges: token.as_source_ranges(),
message: format!("{EXPECTED} but found {} which is an operator, but not a unary one (unary operators apply to just a single operand, your operator applies to two or more operands)", token.value.as_str(),),
})),
TokenType::Bang => Ok((UnaryOperator::Not, token)),
other => Err(KclError::Syntax(KclErrorDetails { source_ranges: token.as_source_ranges(), message: format!("{EXPECTED} but found {} which is {}", token.value.as_str(), other,) })),
})
.context(expected("a unary expression, e.g. -x or -3"))

View File

@ -79,7 +79,7 @@ impl From<ParseError<&[Token], ContextError>> for KclError {
// See https://github.com/KittyCAD/modeling-app/issues/784
KclError::Syntax(KclErrorDetails {
source_ranges: bad_token.as_source_ranges(),
message: "Unexpected token".to_string(),
message: format!("Unexpected token: {}", bad_token.value),
})
}
}

File diff suppressed because it is too large

View File

@ -31,7 +31,7 @@ use crate::{
ast::types::FunctionExpression,
docs::StdLibFn,
errors::KclError,
executor::{MemoryItem, SketchGroup, SketchSurface},
executor::{MemoryItem, ProgramMemory, SketchGroup, SketchSurface},
std::kcl_stdlib::KclStdLibFn,
};
pub use args::Args;
@ -281,6 +281,7 @@ pub enum Primitive {
pub struct FnAsArg<'a> {
pub func: &'a crate::executor::MemoryFunction,
pub expr: Box<FunctionExpression>,
pub memory: Box<ProgramMemory>,
}
#[cfg(test)]

View File

@ -87,7 +87,7 @@ pub async fn pattern_transform(args: Args) -> Result<MemoryItem, KclError> {
fn_expr: transform.expr,
meta: vec![args.source_range.into()],
ctx: args.ctx.clone(),
memory: args.current_program_memory.clone(),
memory: *transform.memory,
},
extr,
&args,

View File

@ -90,7 +90,7 @@ fn word(i: &mut Located<&str>) -> PResult<Token> {
fn operator(i: &mut Located<&str>) -> PResult<Token> {
let (value, range) = alt((
">=", "<=", "==", "=>", "!= ", "|>", "*", "+", "-", "/", "%", "=", "<", ">", r"\", "|", "^",
">=", "<=", "==", "=>", "!= ", "|>", "*", "+", "-", "/", "%", "=", "<", ">", r"\", "||", "|", "^",
))
.with_span()
.parse_next(i)?;
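Ordering matters in this alternation: candidates are tried left to right and the first match wins, so "||" has to be listed before "|" (and "|>" before both), or a || b would tokenize as two single pipes. A tiny illustrative longest-match check in TypeScript, with an abridged operator list:

// Longer operators first: '|>' and '||' must be tried before '|'.
const OPERATORS = ['|>', '||', '|'] // abridged; the real list also has arithmetic and comparison ops

function matchOperator(src: string, at: number): string | null {
  for (const op of OPERATORS) {
    if (src.startsWith(op, at)) return op
  }
  return null
}

console.log(matchOperator('a || b', 2))      // '||', not '|'
console.log(matchOperator('x |> foo(%)', 2)) // '|>'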

View File

@ -1308,7 +1308,7 @@ async fn serial_test_stdlib_kcl_error_right_code_path() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([157, 175])], message: "Expected a SketchGroup or SketchSurface as the third argument, found `[UserVal(UserVal { value: Array [Number(2), Number(2)], meta: [Metadata { source_range: SourceRange([164, 170]) }] }), UserVal(UserVal { value: Number(0.5), meta: [Metadata { source_range: SourceRange([172, 174]) }] })]`" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([157, 175])], message: "Expected an argument at index 2" }"#,
);
}
@ -1406,7 +1406,7 @@ const part = rectShape([0, 0], 20, 20)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([891, 940])], message: "Expected a [number, number] as the first argument, found `[UserVal(UserVal { value: String(\"XY\"), meta: [Metadata { source_range: SourceRange([898, 902]) }] }), UserVal(UserVal { value: Array [Number(-6.0), Number(6)], meta: [Metadata { source_range: SourceRange([904, 927]) }] }), UserVal(UserVal { value: Number(1), meta: [Metadata { source_range: SourceRange([760, 761]) }] })]`" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([891, 940])], message: "Argument at index 0 was supposed to be type [f64; 2] but wasn't" }"#,
);
}
@ -1784,31 +1784,31 @@ const part002 = startSketchOn(part001, 'end')
#[tokio::test(flavor = "multi_thread")]
async fn serial_test_plumbus_fillets() {
let code = r#"fn make_circle = (ext, face, tag ,pos, radius) => {
let code = r#"fn make_circle = (ext, face, pos, radius) => {
const sg = startSketchOn(ext, face)
|> startProfileAt([pos[0] + radius, pos[1]], %)
|> arc({
angle_end: 360,
angle_start: 0,
radius: radius
}, %, tag)
}, %, $arc1)
|> close(%)
return sg
}
fn pentagon = (len, taga, tagb, tagc) => {
fn pentagon = (len) => {
const sg = startSketchOn('XY')
|> startProfileAt([-len / 2, -len / 2], %)
|> angledLine({ angle: 0, length: len }, %,taga)
|> angledLine({ angle: 0, length: len }, %, $a)
|> angledLine({
angle: segAng(a, %) + 180 - 108,
length: len
}, %, tagb)
}, %, $b)
|> angledLine({
angle: segAng(b, %) + 180 - 108,
length: len
}, %,tagc)
}, %, $c)
|> angledLine({
angle: segAng(c, %) + 180 - 108,
length: len
@ -1821,21 +1821,23 @@ fn pentagon = (len, taga, tagb, tagc) => {
return sg
}
const p = pentagon(32, $a, $b, $c)
const p = pentagon(32)
|> extrude(10, %)
const plumbus0 = make_circle(p,a, $arc_a, [0, 0], 2.5)
const circle0 = make_circle(p, p.sketchGroup.tags.a, [0, 0], 2.5)
const plumbus0 = circle0
|> extrude(10, %)
|> fillet({
radius: 0.5,
tags: [arc_a, getOppositeEdge(arc_a, %)]
tags: [circle0.tags.arc1, getOppositeEdge(circle0.tags.arc1, %)]
}, %)
const plumbus1 = make_circle(p, b,$arc_b, [0, 0], 2.5)
const circle1 = make_circle(p, p.sketchGroup.tags.b, [0, 0], 2.5)
const plumbus1 = circle1
|> extrude(10, %)
|> fillet({
radius: 0.5,
tags: [arc_b, getOppositeEdge(arc_b, %)]
tags: [circle1.tags.arc1, getOppositeEdge(circle1.tags.arc1, %)]
}, %)
"#;

View File

@ -39,7 +39,7 @@ async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Program, uuid
// We need to get the sketch ID.
// Get the sketch group ID from memory.
let MemoryItem::SketchGroup(sketch_group) = memory.root.get(name).unwrap() else {
let MemoryItem::SketchGroup(sketch_group) = memory.get(name, SourceRange::default()).unwrap() else {
anyhow::bail!("part001 not found in memory: {:?}", memory);
};
let sketch_id = sketch_group.id;

yarn.lock (108 lines changed)
View File

@ -1922,71 +1922,71 @@
resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.0.0-beta.14.tgz#8c1c65c07559cd29c5103a99e0abe5331cc2246f"
integrity sha512-YLYgHqdwWswr4Y70+hRzaLD6kLIUgHhE3shLXNquPiTaQ9+cX3Q2dB0AFfqsua6NXYFNe7LfkmMzaqEzqv3yQg==
"@tauri-apps/cli-darwin-arm64@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.0.0-beta.22.tgz#5e41c880a4b324bf5c1345fec7259c6b99be2caf"
integrity sha512-Ofhythvg1Ks2IM87WUYNtgFzm21aU1Zn+8QP81lJy9Y7ZGMxP8FYfqeHz6GIWKI+CYf6I77HA8LHkT9pyE5PYg==
"@tauri-apps/cli-darwin-arm64@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.0.0-beta.13.tgz#4926b310f5c39f967753c1c6b9aa20916011ebb6"
integrity sha512-/ibwIj1n2TQSXazGr79K4sfiZ85JndGXjMVN5QD9M8AkhpqgiSM+QT+qfIb+Y8p/RY9v1w1h3+zKMJXjhIppbA==
"@tauri-apps/cli-darwin-x64@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.0.0-beta.22.tgz#029fc107abaa9ede26f5fa3f0949fbf8eaf3667c"
integrity sha512-/lWIixo7WgmMUqcxlPT7Ojlkl6qbVlNDwUZ+9DtTpoWnaaBxv/YpSe1k62vDWEC7l0apFY+Fz7cRONN2wglFyQ==
"@tauri-apps/cli-darwin-x64@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.0.0-beta.13.tgz#50fee7410ef12e705aa446be7f640b7da504c0e3"
integrity sha512-DNqvRzlrH0ZEo+MxdbJIFOYGPCI7iVXzPxSRU+WFz9aa388fZSVEw9jWer5WaAR5FBgp3bDjrkjPuejSb2A8fw==
"@tauri-apps/cli-linux-arm-gnueabihf@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.0.0-beta.22.tgz#5913e0a3c062e22974f0351f5e10c9fd4f66d33c"
integrity sha512-9nJCSStoxu4BKaKVJhu/uBJ8IsIofwAdsX0TWFxqo0obaZbeQSEpPhVsCy+uk3u/28dF+qyUtMCYawO2Uljnag==
"@tauri-apps/cli-linux-arm-gnueabihf@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.0.0-beta.13.tgz#209af191283ec4730fed8fe8e5299a91710e4b38"
integrity sha512-DACLzD8PqgURFBDTnxGODBw/8AP1M5etMrc73dCYs2d4aingc2fVxGYeIQBA0SgijznoCk+pcOmiRsNKO6gemw==
"@tauri-apps/cli-linux-arm64-gnu@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.0.0-beta.22.tgz#86c2107e12c2f2b1289d74a29ebe810df7bb22aa"
integrity sha512-TF9q9zHFBx9LaG2fJJC+BcpIokOmX1UIniBapndvx3dJmdDiK4F6w2QYKDkrBQVzDzcIducmdo2zNBv17O9tFQ==
"@tauri-apps/cli-linux-arm64-gnu@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.0.0-beta.13.tgz#01064bcc7cad8db596e7b54382f7b8e7a96d60e9"
integrity sha512-qm141KNMD6ZjbtAntEZYqiEbiAD0Y6CQnfzmARM9OAPkHD2vk0rnGWSa87N8lnAA27LVAnKj+nTtt77dRLlaVA==
"@tauri-apps/cli-linux-arm64-musl@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.0.0-beta.22.tgz#8a87be68e3ffc9115052bc8d015a9515c7870969"
integrity sha512-ak/RdmaV7sATQmNOxlpHVlbKlrdquH7WH8nOv82X+iK+1HgAOGGqLqBUMzzhkGqo9SHQ9zJ6A2yOo7Z6TJXMmQ==
"@tauri-apps/cli-linux-arm64-musl@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.0.0-beta.13.tgz#1974a209612fd2bbd2b66dc13406f495e5e38dbd"
integrity sha512-AnB+FaqnKfGszStoj7NFZyxMV3Dz4jJcTcCE+EUYJ8Tctah35EJS/39ykskXjXonhxzg8Zr7joXRUVgGFk/yVA==
"@tauri-apps/cli-linux-x64-gnu@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.0.0-beta.22.tgz#108aaa4a939de0ea7567974c627b8061b88be091"
integrity sha512-9t+jQeMqBdXz51ikTh1PQFG/gs9PBzXmtMcIzUxE0juvH/ynjw0Vf+yZbNmwqVS9g7cj8XiBXoc6/N41SZE2cA==
"@tauri-apps/cli-linux-x64-gnu@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.0.0-beta.13.tgz#55fbbb3fe12c1c6fe1e4cae6dce055dcb23a522b"
integrity sha512-do+H48Sq/CJPRCSj7aK4j+QXi5OLbqmVt3YUB7H/krH4PFobveIhm2UpEwTjdEWO2tFTCttj07GD/OYxDhzD/g==
"@tauri-apps/cli-linux-x64-musl@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.0.0-beta.22.tgz#2ea7740c86dc2fc83ed5dfd3f887b0ab76545be4"
integrity sha512-PemcztfHG3HAuuo7HcnhfDrtN9NT7kueyNg8ipxJNPMa+s4K7kfieViyEiMW5pTr2F5WG/UuBSNcuwY+DVCcPA==
"@tauri-apps/cli-linux-x64-musl@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.0.0-beta.13.tgz#67b0f6859072ec297b39dfb5bdf76fc897cfe6f4"
integrity sha512-txkn8CAb8/n6vOHvuXhUBKBJFAip6dF11qqK1lcpsgpNdv1UbvpZYYbjEd8y4jWyjN7OEoIseTtzFzXdezycDw==
"@tauri-apps/cli-win32-arm64-msvc@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.0.0-beta.22.tgz#c378ff68b99aa1afa17a583e4e7a0b16339282b7"
integrity sha512-EgKoG/jGEtTzhOp7ISjMdQsfd8IOG/5yZhO9Z4L/u7oB9mprKAJohYs24+ZxJtq2bOz4f/ZIysZ19nbkpxUzrg==
"@tauri-apps/cli-win32-arm64-msvc@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.0.0-beta.13.tgz#96de65e90b29545ac3825ed51c1fd22648a40299"
integrity sha512-SKa+qiZQ0+JXTHYtZKJw6RuUoolI/GU7E7pTHfkhYpGFO8UXLpTABkQ0KbN0RK0Bw/MOFFVqnAN2AoXLgPUDEA==
"@tauri-apps/cli-win32-ia32-msvc@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.0.0-beta.22.tgz#88a81f2c6dbf62ea18252a542d45eb4190b4ed6e"
integrity sha512-67OrM2m4FB3KujPbjd/i+9lqcLDO3/ixqL1GMc3BoHhcjF+7QY08OxqWeitdsP/8ihnMIIdir2xEjNUKc6Zelw==
"@tauri-apps/cli-win32-ia32-msvc@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.0.0-beta.13.tgz#ff3fe14c68abed27810d727a6d7e78c7f172785c"
integrity sha512-4i9MK2mxNVF2Y1Wp6r/73Xhpevaz1sXD1DezfCDC8Fdszxo2IhkIZ0AYF5/M+TnSLyJk2u5TtFCnbaOt5e4gCg==
"@tauri-apps/cli-win32-x64-msvc@2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.0.0-beta.22.tgz#eb156270431382b64f8d95f0e86a72a6311a3b6f"
integrity sha512-BsO5xMUxliTZTImXnOC73sKT2U9VUeqR8AtklSObBcAg5LaZKpYOdF2pZzU6rIMAZwzROTAT1hYsr4r/nx2UZg==
"@tauri-apps/cli-win32-x64-msvc@2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.0.0-beta.13.tgz#adac497f4cb289f147678ceced59102ae4d2696a"
integrity sha512-aQRwG/dc9zScIzCst646uyprppxc1Gx4jFJUw4yAEikO32SOS+90c8NFEj6H3HtZBmhzfI3JDxrGJl7ORAOCCQ==
"@tauri-apps/cli@==2.0.0-beta.22":
version "2.0.0-beta.22"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-2.0.0-beta.22.tgz#a8c58568db903271b767ecec689bdf7632fbc8cf"
integrity sha512-OAuiDdSRKxNmr/dseQKKMoZZxIhQ6aAxmXJctGYJxCnkd62tQ8xeq87roVXGNS5Qkuv7WpySAyR0ntiMjvNLUA==
"@tauri-apps/cli@==2.0.0-beta.13":
version "2.0.0-beta.13"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-2.0.0-beta.13.tgz#59f410c680cce0707675c78a745bf5c6faa1a6a3"
integrity sha512-Kp0zSvrhXrOQL+8evRMJufnDYDinWXaBb1Un8x4cptrM0GAKjYddV4vjNsXvEyjlXv0S+SWJD0OUNHQyMDUlAg==
optionalDependencies:
"@tauri-apps/cli-darwin-arm64" "2.0.0-beta.22"
"@tauri-apps/cli-darwin-x64" "2.0.0-beta.22"
"@tauri-apps/cli-linux-arm-gnueabihf" "2.0.0-beta.22"
"@tauri-apps/cli-linux-arm64-gnu" "2.0.0-beta.22"
"@tauri-apps/cli-linux-arm64-musl" "2.0.0-beta.22"
"@tauri-apps/cli-linux-x64-gnu" "2.0.0-beta.22"
"@tauri-apps/cli-linux-x64-musl" "2.0.0-beta.22"
"@tauri-apps/cli-win32-arm64-msvc" "2.0.0-beta.22"
"@tauri-apps/cli-win32-ia32-msvc" "2.0.0-beta.22"
"@tauri-apps/cli-win32-x64-msvc" "2.0.0-beta.22"
"@tauri-apps/cli-darwin-arm64" "2.0.0-beta.13"
"@tauri-apps/cli-darwin-x64" "2.0.0-beta.13"
"@tauri-apps/cli-linux-arm-gnueabihf" "2.0.0-beta.13"
"@tauri-apps/cli-linux-arm64-gnu" "2.0.0-beta.13"
"@tauri-apps/cli-linux-arm64-musl" "2.0.0-beta.13"
"@tauri-apps/cli-linux-x64-gnu" "2.0.0-beta.13"
"@tauri-apps/cli-linux-x64-musl" "2.0.0-beta.13"
"@tauri-apps/cli-win32-arm64-msvc" "2.0.0-beta.13"
"@tauri-apps/cli-win32-ia32-msvc" "2.0.0-beta.13"
"@tauri-apps/cli-win32-x64-msvc" "2.0.0-beta.13"
"@tauri-apps/plugin-dialog@^2.0.0-beta.6":
version "2.0.0-beta.6"