Compare commits


1 Commit

SHA1: bfefa0f51a
Message: Animate using a KCL function
Date: 2025-06-24 16:01:36 -04:00
210 changed files with 2537 additions and 3476 deletions

View File

@ -43,7 +43,7 @@ jobs:
- name: Download Wasm Cache
id: download-wasm
if: ${{ github.event_name == 'pull_request' && steps.filter.outputs.rust == 'false' }}
uses: dawidd6/action-download-artifact@v11
uses: dawidd6/action-download-artifact@v7
continue-on-error: true
with:
github_token: ${{secrets.GITHUB_TOKEN}}
@ -362,7 +362,7 @@ jobs:
- name: Authenticate to Google Cloud
if: ${{ env.IS_STAGING == 'true' }}
uses: 'google-github-actions/auth@v2.1.10'
uses: 'google-github-actions/auth@v2.1.8'
with:
credentials_json: '${{ secrets.GOOGLE_CLOUD_DL_SA }}'

View File

@ -25,8 +25,8 @@ jobs:
- runner=8cpu-linux-x64
- extras=s3-cache
steps:
- uses: runs-on/action@v2
- uses: actions/create-github-app-token@v2
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
@ -149,8 +149,8 @@ jobs:
partitionIndex: [1, 2, 3, 4, 5, 6]
partitionTotal: [6]
steps:
- uses: runs-on/action@v2
- uses: actions/create-github-app-token@v2
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
@ -207,8 +207,8 @@ jobs:
- runner=32cpu-linux-x64
- extras=s3-cache
steps:
- uses: runs-on/action@v2
- uses: actions/create-github-app-token@v2
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}

View File

@ -46,7 +46,7 @@ jobs:
- name: Download Wasm cache
id: download-wasm
if: ${{ github.event_name != 'schedule' && steps.filter.outputs.rust == 'false' }}
uses: dawidd6/action-download-artifact@v11
uses: dawidd6/action-download-artifact@v7
continue-on-error: true
with:
github_token: ${{secrets.GITHUB_TOKEN}}
@ -110,7 +110,7 @@ jobs:
steps:
- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
@ -230,7 +230,7 @@ jobs:
steps:
- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}

View File

@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: app-token
with:
# required

View File

@ -328,7 +328,7 @@ jobs:
mkdir -p releases/language-server/${{ env.TAG }}
cp -r build/* releases/language-server/${{ env.TAG }}
- name: "Authenticate to Google Cloud"
uses: "google-github-actions/auth@v2.1.10"
uses: "google-github-actions/auth@v2.1.8"
with:
credentials_json: "${{ secrets.GOOGLE_CLOUD_DL_SA }}"
- name: Set up Cloud SDK

View File

@ -113,7 +113,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v5
- uses: actions-rust-lang/setup-rust-toolchain@v1
- uses: taiki-e/install-action@just
- name: Run tests
@ -130,7 +130,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v5
- name: Install codespell
run: |
uv venv .venv
@ -161,7 +161,7 @@ jobs:
with:
path: rust/kcl-python-bindings
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v6
uses: astral-sh/setup-uv@v5
- name: do uv things
run: |
cd rust/kcl-python-bindings

View File

@ -108,7 +108,7 @@ jobs:
run: npm run files:set-notes
- name: Authenticate to Google Cloud
uses: 'google-github-actions/auth@v2.1.10'
uses: 'google-github-actions/auth@v2.1.8'
with:
credentials_json: '${{ secrets.GOOGLE_CLOUD_DL_SA }}'

View File

@ -62,10 +62,7 @@ else
endif
public/kcl-samples/manifest.json: $(KCL_SOURCES)
ifndef WINDOWS
cd rust/kcl-lib && EXPECTORATE=overwrite cargo test generate_manifest
@ touch $@
endif
.vite/build/main.js: $(REACT_SOURCES) $(TYPESCRIPT_SOURCES) $(VITE_SOURCES)
npm run tronb:vite:dev

View File

@ -12,7 +12,7 @@ test.describe('Point and click for boolean workflows', () => {
},
{
name: 'subtract',
code: 'subtract(extrude001, tools = extrude006)',
code: 'subtract([extrude001], tools = [extrude006])',
},
{
name: 'intersect',
@ -81,8 +81,6 @@ test.describe('Point and click for boolean workflows', () => {
if (operationName !== 'subtract') {
// should down shift key to select multiple objects
await page.keyboard.down('Shift')
} else {
await cmdBar.progressCmdBar()
}
// Select second object
@ -105,8 +103,8 @@ test.describe('Point and click for boolean workflows', () => {
await cmdBar.expectState({
stage: 'review',
headerArguments: {
Solids: '1 path',
Tools: '1 path',
Tool: '1 path',
Target: '1 path',
},
commandName,
})
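
One side of this hunk asserts the bracketed list form of the boolean call, subtract([extrude001], tools = [extrude006]), and the other the bare form. A minimal KCL sketch of the list form, built only from calls that appear elsewhere in this diff (the two concentric cylinders are hypothetical stand-ins for the test's real geometry):

    // Two overlapping solids to subtract (hypothetical shapes)
    sketch001 = startSketchOn(XY)
    profile001 = circle(sketch001, center = [0, 0], diameter = 20)
    extrude001 = extrude(profile001, length = 10)

    sketch002 = startSketchOn(XY)
    profile002 = circle(sketch002, center = [0, 0], diameter = 10)
    extrude006 = extrude(profile002, length = 10)

    // List form: both the solids and the tools are passed as arrays
    subtract([extrude001], tools = [extrude006])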

View File

@ -5,7 +5,6 @@ import { uuidv4 } from '@src/lib/utils'
import type { HomePageFixture } from '@e2e/playwright/fixtures/homePageFixture'
import type { SceneFixture } from '@e2e/playwright/fixtures/sceneFixture'
import type { ToolbarFixture } from '@e2e/playwright/fixtures/toolbarFixture'
import type { CmdBarFixture } from '@e2e/playwright/fixtures/cmdBarFixture'
import { getUtils } from '@e2e/playwright/test-utils'
import { expect, test } from '@e2e/playwright/zoo-test'
@ -15,18 +14,13 @@ test.describe('Can create sketches on all planes and their back sides', () => {
homePage: HomePageFixture,
scene: SceneFixture,
toolbar: ToolbarFixture,
cmdBar: CmdBarFixture,
plane: string,
clickCoords: { x: number; y: number }
) => {
const u = await getUtils(page)
// await page.addInitScript(() => {
// localStorage.setItem('persistCode', '@settings(defaultLengthUnit = in)')
// })
await page.setBodyDimensions({ width: 1200, height: 500 })
await homePage.goToModelingScene()
// await scene.settled(cmdBar)
const XYPlanRed: [number, number, number] = [98, 50, 51]
await scene.expectPixelColor(XYPlanRed, { x: 700, y: 300 }, 15)
@ -125,166 +119,12 @@ test.describe('Can create sketches on all planes and their back sides', () => {
]
for (const config of planeConfigs) {
test(config.plane, async ({ page, homePage, scene, toolbar, cmdBar }) => {
test(config.plane, async ({ page, homePage, scene, toolbar }) => {
await sketchOnPlaneAndBackSideTest(
page,
homePage,
scene,
toolbar,
cmdBar,
config.plane,
config.coords
)
})
}
})
test.describe('Can create sketches on offset planes and their back sides', () => {
const sketchOnPlaneAndBackSideTest = async (
page: Page,
homePage: HomePageFixture,
scene: SceneFixture,
toolbar: ToolbarFixture,
cmdbar: CmdBarFixture,
plane: string,
clickCoords: { x: number; y: number }
) => {
const u = await getUtils(page)
await page.addInitScript(() => {
localStorage.setItem(
'persistCode',
`@settings(defaultLengthUnit = in)
xyPlane = offsetPlane(XY, offset = 0.05)
xzPlane = offsetPlane(XZ, offset = 0.05)
yzPlane = offsetPlane(YZ, offset = 0.05)
`
)
})
await page.setBodyDimensions({ width: 1200, height: 500 })
await homePage.goToModelingScene()
// await scene.settled(cmdbar)
const XYPlanRed: [number, number, number] = [74, 74, 74]
await scene.expectPixelColor(XYPlanRed, { x: 700, y: 300 }, 15)
await u.openDebugPanel()
const coord =
plane === '-XY' || plane === '-YZ' || plane === 'XZ' ? -100 : 100
const camCommand: EngineCommand = {
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_look_at',
center: { x: 0, y: 0, z: 0 },
vantage: { x: coord, y: coord, z: coord },
up: { x: 0, y: 0, z: 1 },
},
}
const updateCamCommand: EngineCommand = {
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_get_settings',
},
}
const prefix = plane.length === 3 ? '-' : ''
const planeName = plane
.slice(plane.length === 3 ? 1 : 0)
.toLocaleLowerCase()
const codeLine1 = `sketch001 = startSketchOn(${prefix}${planeName}Plane)`
const codeLine2 = `profile001 = startProfile(sketch001, at = [${0.91 + (plane[0] === '-' ? 0.01 : 0)}, -${1.21 + (plane[0] === '-' ? 0.01 : 0)}])`
await u.openDebugPanel()
await u.clearCommandLogs()
await page.getByRole('button', { name: 'Start Sketch' }).click()
await u.sendCustomCmd(camCommand)
await page.waitForTimeout(100)
await u.sendCustomCmd(updateCamCommand)
await u.closeDebugPanel()
await toolbar.openFeatureTreePane()
await toolbar.getDefaultPlaneVisibilityButton('XY').click()
await toolbar.getDefaultPlaneVisibilityButton('XZ').click()
await toolbar.getDefaultPlaneVisibilityButton('YZ').click()
await expect(
toolbar
.getDefaultPlaneVisibilityButton('YZ')
.locator('[aria-label="eye crossed out"]')
).toBeVisible()
await page.mouse.click(clickCoords.x, clickCoords.y)
await page.waitForTimeout(600) // wait for animation
await toolbar.waitUntilSketchingReady()
await expect(
page.getByRole('button', { name: 'line Line', exact: true })
).toBeVisible()
await u.closeDebugPanel()
await page.mouse.click(707, 393)
await expect(page.locator('.cm-content')).toContainText(codeLine1)
await expect(page.locator('.cm-content')).toContainText(codeLine2)
await page
.getByRole('button', { name: 'line Line', exact: true })
.first()
.click()
await u.openAndClearDebugPanel()
await page.getByRole('button', { name: 'Exit Sketch' }).click()
await u.expectCmdLog('[data-message-type="execution-done"]')
await u.clearCommandLogs()
await u.removeCurrentCode()
}
const planeConfigs = [
{
plane: 'XY',
coords: { x: 600, y: 388 },
description: 'red plane',
},
{
plane: 'YZ',
coords: { x: 700, y: 250 },
description: 'green plane',
},
{
plane: 'XZ',
coords: { x: 684, y: 427 },
description: 'blue plane',
},
{
plane: '-XY',
coords: { x: 600, y: 118 },
description: 'back of red plane',
},
{
plane: '-YZ',
coords: { x: 700, y: 219 },
description: 'back of green plane',
},
{
plane: '-XZ',
coords: { x: 700, y: 80 },
description: 'back of blue plane',
},
]
for (const config of planeConfigs) {
test(config.plane, async ({ page, homePage, scene, toolbar, cmdBar }) => {
await sketchOnPlaneAndBackSideTest(
page,
homePage,
scene,
toolbar,
cmdBar,
config.plane,
config.coords
)

View File

@ -525,9 +525,7 @@ test.describe('Command bar tests', () => {
const projectName = 'test'
const beforeKclCode = `a = 5
b = a * a
c = 3 + a
theta = 45deg
`
c = 3 + a`
await context.folderSetupFn(async (dir) => {
const testProject = join(dir, projectName)
await fsp.mkdir(testProject, { recursive: true })
@ -617,45 +615,9 @@ theta = 45deg
stage: 'commandBarClosed',
})
})
await test.step(`Edit a parameter with explicit units via command bar`, async () => {
await cmdBar.cmdBarOpenBtn.click()
await cmdBar.chooseCommand('edit parameter')
await cmdBar
.selectOption({
name: 'theta',
})
.click()
await cmdBar.expectState({
stage: 'arguments',
commandName: 'Edit parameter',
currentArgKey: 'value',
currentArgValue: '45deg',
headerArguments: {
Name: 'theta',
Value: '',
},
highlightedHeaderArg: 'value',
})
await cmdBar.argumentInput
.locator('[contenteditable]')
.fill('45deg + 1deg')
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
commandName: 'Edit parameter',
headerArguments: {
Name: 'theta',
Value: '46deg',
},
})
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'commandBarClosed',
})
})
await editor.expectEditor.toContain(
`a = 5b = a * amyParameter001 = ${newValue}c = 3 + atheta = 45deg + 1deg`
`a = 5b = a * amyParameter001 = ${newValue}c = 3 + a`
)
})
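
One side of this compare carries a command-bar step that edits a KCL parameter declared with an explicit angle unit. A minimal sketch of the code that step works against (values taken from the hunk; the unit survives the arithmetic, which is why the review stage shows 46deg):

    a = 5
    b = a * a
    c = 3 + a
    theta = 45deg + 1deg // value typed into the command bar; reviews as 46deg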

View File

@ -288,7 +288,7 @@ a1 = startSketchOn(offsetPlane(XY, offset = 10))
// error text on hover
await page.hover('.cm-lint-marker-info')
await expect(
page.getByText('Identifiers should be lowerCamelCase').first()
page.getByText('Identifiers must be lowerCamelCase').first()
).toBeVisible()
await page.locator('#code-pane button:first-child').click()
@ -314,7 +314,7 @@ sketch_001 = startSketchOn(XY)
// error text on hover
await page.hover('.cm-lint-marker-info')
await expect(
page.getByText('Identifiers should be lowerCamelCase').first()
page.getByText('Identifiers must be lowerCamelCase').first()
).toBeVisible()
})
@ -511,7 +511,7 @@ sketch_001 = startSketchOn(XY)
// error text on hover
await page.hover('.cm-lint-marker-info')
await expect(
page.getByText('Identifiers should be lowerCamelCase').first()
page.getByText('Identifiers must be lowerCamelCase').first()
).toBeVisible()
// focus the editor
@ -539,7 +539,7 @@ sketch_001 = startSketchOn(XY)
// error text on hover
await page.hover('.cm-lint-marker-info')
await expect(
page.getByText('Identifiers should be lowerCamelCase').first()
page.getByText('Identifiers must be lowerCamelCase').first()
).toBeVisible()
})
@ -681,7 +681,7 @@ a1 = startSketchOn(offsetPlane(XY, offset = 10))
// error text on hover
await page.hover('.cm-lint-marker-info')
await expect(
page.getByText('Identifiers should be lowerCamelCase').first()
page.getByText('Identifiers must be lowerCamelCase').first()
).toBeVisible()
// select the line that's causing the error and delete it

View File

@ -274,13 +274,6 @@ export class ToolbarFixture {
.nth(operationIndex)
}
getDefaultPlaneVisibilityButton(plane: 'XY' | 'XZ' | 'YZ' = 'XY') {
const index = plane === 'XZ' ? 0 : plane === 'XY' ? 1 : 2
return this.featureTreePane
.getByTestId('feature-tree-visibility-toggle')
.nth(index)
}
/**
* View source on a specific operation in the Feature Tree pane.
* @param operationName The name of the operation type

View File

@ -7,7 +7,6 @@ import type { SceneFixture } from '@e2e/playwright/fixtures/sceneFixture'
import type { ToolbarFixture } from '@e2e/playwright/fixtures/toolbarFixture'
import { expect, test } from '@e2e/playwright/zoo-test'
import { bracket } from '@e2e/playwright/fixtures/bracket'
import type { CmdBarSerialised } from '@e2e/playwright/fixtures/cmdBarFixture'
// test file is for testing point an click code gen functionality that's not sketch mode related
@ -136,17 +135,17 @@ test.describe('Point-and-click tests', () => {
highlightedHeaderArg: 'length',
commandName: 'Extrude',
})
await page.keyboard.insertText('width - 0.001in')
await page.keyboard.insertText('width - 0.001')
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
headerArguments: {
Length: '4.999in',
Length: '4.999',
},
commandName: 'Extrude',
})
await cmdBar.progressCmdBar()
await editor.expectEditor.toContain('extrude(length = width - 0.001in)')
await editor.expectEditor.toContain('extrude(length = width - 0.001)')
})
await test.step(`Edit second extrude via feature tree`, async () => {
@ -1142,20 +1141,6 @@ openSketch = startSketchOn(XY)
})
})
const initialCmdBarStateHelix: CmdBarSerialised = {
stage: 'arguments',
currentArgKey: 'mode',
currentArgValue: '',
headerArguments: {
Mode: '',
AngleStart: '',
Revolutions: '',
Radius: '',
},
highlightedHeaderArg: 'mode',
commandName: 'Helix',
}
test('Helix point-and-click on default axis', async ({
context,
page,
@ -1165,14 +1150,30 @@ openSketch = startSketchOn(XY)
toolbar,
cmdBar,
}) => {
const expectedOutput = `helix001 = helix( axis = X, radius = 5, length = 5, revolutions = 1, angleStart = 270,)`
// One dumb hardcoded screen pixel value
const testPoint = { x: 620, y: 257 }
const expectedOutput = `helix001 = helix( axis = X, radius = 5, length = 5, revolutions = 1, angleStart = 270, ccw = false,)`
const expectedLine = `axis=X,`
await homePage.goToModelingScene()
await scene.connectionEstablished()
await test.step(`Go through the command bar flow`, async () => {
await toolbar.helixButton.click()
await cmdBar.expectState(initialCmdBarStateHelix)
await cmdBar.expectState({
stage: 'arguments',
currentArgKey: 'mode',
currentArgValue: '',
headerArguments: {
Mode: '',
AngleStart: '',
Revolutions: '',
Radius: '',
CounterClockWise: '',
},
highlightedHeaderArg: 'mode',
commandName: 'Helix',
})
await cmdBar.progressCmdBar()
await expect.poll(() => page.getByText('Axis').count()).toBe(6)
await cmdBar.progressCmdBar()
@ -1189,6 +1190,7 @@ openSketch = startSketchOn(XY)
AngleStart: '',
Length: '',
Radius: '',
CounterClockWise: '',
},
commandName: 'Helix',
})
@ -1205,10 +1207,11 @@ openSketch = startSketchOn(XY)
Revolutions: '1',
Length: '5',
Radius: '5',
CounterClockWise: '',
},
commandName: 'Helix',
})
await cmdBar.submit()
await cmdBar.progressCmdBar()
})
await test.step(`Confirm code is added to the editor, scene has changed`, async () => {
@ -1218,6 +1221,8 @@ openSketch = startSketchOn(XY)
activeLines: [expectedLine],
highlightedCode: '',
})
// Red plane is now gone, white helix is there
await scene.expectPixelColor([250, 250, 250], testPoint, 15)
})
await test.step(`Edit helix through the feature tree`, async () => {
@ -1229,18 +1234,21 @@ openSketch = startSketchOn(XY)
await cmdBar.expectState({
commandName: 'Helix',
stage: 'arguments',
currentArgKey: 'length',
currentArgValue: '5',
currentArgKey: 'CounterClockWise',
currentArgValue: '',
headerArguments: {
Axis: 'X',
AngleStart: '270',
Revolutions: '1',
Radius: '5',
Length: initialInput,
CounterClockWise: '',
},
highlightedHeaderArg: 'length',
highlightedHeaderArg: 'CounterClockWise',
})
await page.keyboard.insertText(newInput)
await page.keyboard.press('Shift+Backspace')
await expect(cmdBar.currentArgumentInput).toBeVisible()
await cmdBar.currentArgumentInput.locator('.cm-content').fill(newInput)
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
@ -1250,10 +1258,11 @@ openSketch = startSketchOn(XY)
Revolutions: '1',
Radius: '5',
Length: newInput,
CounterClockWise: '',
},
commandName: 'Helix',
})
await cmdBar.submit()
await cmdBar.progressCmdBar()
await toolbar.closeFeatureTreePane()
await editor.openPane()
await editor.expectEditor.toContain('length = ' + newInput)
@ -1264,81 +1273,28 @@ openSketch = startSketchOn(XY)
const operationButton = await toolbar.getFeatureTreeOperation('Helix', 0)
await operationButton.click({ button: 'left' })
await page.keyboard.press('Delete')
await scene.settled(cmdBar)
await editor.expectEditor.not.toContain('helix')
await expect(
await toolbar.getFeatureTreeOperation('Helix', 0)
).not.toBeVisible()
// Red plane is back
await scene.expectPixelColor([96, 52, 52], testPoint, 15)
})
})
test(`Helix point-and-click around segment`, async ({
context,
page,
homePage,
scene,
editor,
toolbar,
cmdBar,
}) => {
const initialCode = `sketch001 = startSketchOn(XZ)
profile001 = startProfile(sketch001, at = [0, 0])
|> yLine(length = 100)
|> line(endAbsolute = [100, 0])
|> line(endAbsolute = [profileStartX(%), profileStartY(%)])
|> close()`
await context.addInitScript((initialCode) => {
localStorage.setItem('persistCode', initialCode)
}, initialCode)
await page.setBodyDimensions({ width: 1000, height: 500 })
await homePage.goToModelingScene()
await scene.settled(cmdBar)
await test.step(`Go through the command bar flow`, async () => {
await toolbar.closePane('code')
await toolbar.helixButton.click()
await cmdBar.expectState(initialCmdBarStateHelix)
await cmdBar.selectOption({ name: 'Edge' }).click()
await editor.selectText('yLine(length = 100)')
await cmdBar.progressCmdBar()
await page.keyboard.insertText('1')
await cmdBar.progressCmdBar()
await page.keyboard.insertText('2')
await cmdBar.progressCmdBar()
await page.keyboard.insertText('3')
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
headerArguments: {
Mode: 'Edge',
Edge: `1 segment`,
AngleStart: '2',
Revolutions: '1',
Radius: '3',
const helixCases = [
{
selectionType: 'segment',
testPoint: { x: 513, y: 221 },
expectedOutput: `helix001 = helix( axis = seg01, radius = 1, revolutions = 20, angleStart = 0, ccw = false,)`,
expectedEditedOutput: `helix001 = helix( axis = seg01, radius = 5, revolutions = 20, angleStart = 0, ccw = false,)`,
},
commandName: 'Helix',
})
await cmdBar.submit()
await scene.settled(cmdBar)
})
await test.step(`Confirm code is added to the editor, scene has changed`, async () => {
await toolbar.openPane('code')
await editor.expectEditor.toContain(
`
helix001 = helix(
axis = seg01,
radius = 3,
revolutions = 1,
angleStart = 2,
)`,
{ shouldNormalise: true }
)
await toolbar.closePane('code')
})
})
test(`Helix point-and-click around sweepEdge with edit and delete flows`, async ({
{
selectionType: 'sweepEdge',
testPoint: { x: 564, y: 364 },
expectedOutput: `helix001 = helix( axis = getOppositeEdge(seg01), radius = 1, revolutions = 20, angleStart = 0, ccw = false,)`,
expectedEditedOutput: `helix001 = helix( axis = getOppositeEdge(seg01), radius = 5, revolutions = 20, angleStart = 0, ccw = false,)`,
},
]
helixCases.map(
({ selectionType, testPoint, expectedOutput, expectedEditedOutput }) => {
test(`Helix point-and-click around ${selectionType}`, async ({
context,
page,
homePage,
@ -1347,6 +1303,7 @@ profile001 = startProfile(sketch001, at = [0, 0])
toolbar,
cmdBar,
}) => {
page.on('console', console.log)
const initialCode = `sketch001 = startSketchOn(XZ)
profile001 = startProfile(sketch001, at = [0, 0])
|> yLine(length = 100)
@ -1355,8 +1312,7 @@ profile001 = startProfile(sketch001, at = [0, 0])
|> close()
extrude001 = extrude(profile001, length = 100)`
// One dumb hardcoded screen pixel value to click on the sweepEdge, can't think of another way?
const testPoint = { x: 564, y: 364 }
// One dumb hardcoded screen pixel value
const [clickOnEdge] = scene.makeMouseHelpers(testPoint.x, testPoint.y)
await context.addInitScript((initialCode) => {
@ -1369,14 +1325,30 @@ extrude001 = extrude(profile001, length = 100)`
await test.step(`Go through the command bar flow`, async () => {
await toolbar.closePane('code')
await toolbar.helixButton.click()
await cmdBar.expectState(initialCmdBarStateHelix)
await cmdBar.expectState({
stage: 'arguments',
currentArgKey: 'mode',
currentArgValue: '',
headerArguments: {
AngleStart: '',
Mode: '',
CounterClockWise: '',
Radius: '',
Revolutions: '',
},
highlightedHeaderArg: 'mode',
commandName: 'Helix',
})
await cmdBar.selectOption({ name: 'Edge' }).click()
await expect
.poll(() => page.getByText('Please select one').count())
.toBe(1)
await clickOnEdge()
await page.waitForTimeout(1000)
await cmdBar.progressCmdBar()
await page.waitForTimeout(1000)
await cmdBar.argumentInput.focus()
await page.waitForTimeout(1000)
await page.keyboard.insertText('20')
await cmdBar.progressCmdBar()
await page.keyboard.insertText('0')
@ -1388,62 +1360,33 @@ extrude001 = extrude(profile001, length = 100)`
stage: 'review',
headerArguments: {
Mode: 'Edge',
Edge: `1 sweepEdge`,
Edge: `1 ${selectionType}`,
AngleStart: '0',
Revolutions: '20',
Radius: '1',
CounterClockWise: '',
},
commandName: 'Helix',
})
await cmdBar.submit()
await scene.settled(cmdBar)
await cmdBar.progressCmdBar()
await page.waitForTimeout(1000)
})
await test.step(`Confirm code is added to the editor, scene has changed`, async () => {
await toolbar.openPane('code')
await editor.expectEditor.toContain(
`
helix001 = helix(
axis = getOppositeEdge(seg01),
radius = 1,
revolutions = 20,
angleStart = 0,
)`,
{ shouldNormalise: true }
)
await editor.expectEditor.toContain(expectedOutput)
await toolbar.closePane('code')
})
await test.step(`Edit helix through the feature tree`, async () => {
await toolbar.openPane('feature-tree')
const operationButton = await toolbar.getFeatureTreeOperation('Helix', 0)
const operationButton = await toolbar.getFeatureTreeOperation(
'Helix',
0
)
await operationButton.dblclick()
const initialInput = '1'
const newInput = '5'
await cmdBar.expectState({
commandName: 'Helix',
stage: 'arguments',
currentArgKey: 'radius',
currentArgValue: initialInput,
headerArguments: {
AngleStart: '0',
Revolutions: '20',
Radius: initialInput,
},
highlightedHeaderArg: 'radius',
})
await page.keyboard.insertText(newInput)
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
headerArguments: {
AngleStart: '0',
Revolutions: '20',
Radius: newInput,
},
commandName: 'Helix',
})
await cmdBar.clickOptionalArgument('ccw')
await cmdBar.expectState({
commandName: 'Helix',
stage: 'arguments',
@ -1452,12 +1395,19 @@ extrude001 = extrude(profile001, length = 100)`
headerArguments: {
AngleStart: '0',
Revolutions: '20',
Radius: newInput,
Radius: initialInput,
CounterClockWise: '',
},
highlightedHeaderArg: 'CounterClockWise',
})
await cmdBar.selectOption({ name: 'True' }).click()
await page
.getByRole('button', { name: 'radius', exact: false })
.click()
await expect(cmdBar.currentArgumentInput).toBeVisible()
await cmdBar.currentArgumentInput
.locator('.cm-content')
.fill(newInput)
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
headerArguments: {
@ -1468,34 +1418,29 @@ extrude001 = extrude(profile001, length = 100)`
},
commandName: 'Helix',
})
await cmdBar.submit()
await cmdBar.progressCmdBar()
await toolbar.closePane('feature-tree')
await toolbar.openPane('code')
await editor.expectEditor.toContain(
`
helix001 = helix(
axis = getOppositeEdge(seg01),
radius = 5,
revolutions = 20,
angleStart = 0,
ccw = true,
)`,
{ shouldNormalise: true }
)
await editor.expectEditor.toContain(expectedEditedOutput)
await toolbar.closePane('code')
})
await test.step('Delete helix via feature tree selection', async () => {
await toolbar.openPane('feature-tree')
const operationButton = await toolbar.getFeatureTreeOperation('Helix', 0)
const operationButton = await toolbar.getFeatureTreeOperation(
'Helix',
0
)
await operationButton.click({ button: 'left' })
await page.keyboard.press('Delete')
await editor.expectEditor.not.toContain('helix')
await editor.expectEditor.not.toContain(expectedEditedOutput)
await expect(
await toolbar.getFeatureTreeOperation('Helix', 0)
).not.toBeVisible()
})
})
}
)
test('Helix point-and-click on cylinder', async ({
context,
@ -1525,12 +1470,26 @@ extrude001 = extrude(profile001, length = 100)
// One dumb hardcoded screen pixel value
const testPoint = { x: 620, y: 257 }
const [clickOnWall] = scene.makeMouseHelpers(testPoint.x, testPoint.y)
const expectedOutput = `helix001 = helix(cylinder = extrude001, revolutions = 1, angleStart = 360)`
const expectedEditedOutput = `helix001 = helix(cylinder = extrude001, revolutions = 1, angleStart = 10)`
const expectedOutput = `helix001 = helix( cylinder = extrude001, revolutions = 1, angleStart = 360, ccw = false,)`
const expectedLine = `cylinder = extrude001,`
const expectedEditedOutput = `helix001 = helix( cylinder = extrude001, revolutions = 1, angleStart = 360, ccw = true,)`
await test.step(`Go through the command bar flow`, async () => {
await toolbar.helixButton.click()
await cmdBar.expectState(initialCmdBarStateHelix)
await cmdBar.expectState({
stage: 'arguments',
currentArgKey: 'mode',
currentArgValue: '',
headerArguments: {
Mode: '',
AngleStart: '',
Revolutions: '',
Radius: '',
CounterClockWise: '',
},
highlightedHeaderArg: 'mode',
commandName: 'Helix',
})
await cmdBar.selectOption({ name: 'Cylinder' }).click()
await cmdBar.expectState({
stage: 'arguments',
@ -1541,6 +1500,7 @@ extrude001 = extrude(profile001, length = 100)
Cylinder: '',
AngleStart: '',
Revolutions: '',
CounterClockWise: '',
},
highlightedHeaderArg: 'cylinder',
commandName: 'Helix',
@ -1556,17 +1516,18 @@ extrude001 = extrude(profile001, length = 100)
Cylinder: '1 face',
AngleStart: '360',
Revolutions: '1',
CounterClockWise: '',
},
commandName: 'Helix',
})
await cmdBar.submit()
await cmdBar.progressCmdBar()
})
await test.step(`Confirm code is added to the editor, scene has changed`, async () => {
await editor.expectEditor.toContain(expectedOutput)
await editor.expectState({
diagnostics: [],
activeLines: [expectedOutput],
activeLines: [expectedLine],
highlightedCode: '',
})
})
@ -1578,21 +1539,22 @@ extrude001 = extrude(profile001, length = 100)
await cmdBar.expectState({
commandName: 'Helix',
stage: 'arguments',
currentArgKey: 'angleStart',
currentArgValue: '360',
currentArgKey: 'CounterClockWise',
currentArgValue: '',
headerArguments: {
AngleStart: '360',
Revolutions: '1',
CounterClockWise: '',
},
highlightedHeaderArg: 'angleStart',
highlightedHeaderArg: 'CounterClockWise',
})
await page.keyboard.insertText('10')
await cmdBar.progressCmdBar()
await cmdBar.selectOption({ name: 'True' }).click()
await cmdBar.expectState({
stage: 'review',
headerArguments: {
AngleStart: '10',
AngleStart: '360',
Revolutions: '1',
CounterClockWise: 'true',
},
commandName: 'Helix',
})
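
Throughout this file the Helix command flow gains (or drops, depending on which side of the compare you read) a CounterClockWise argument, and the expected generated code carries a trailing ccw flag. For reference, the default-axis output string from the hunk above, reflowed as KCL (all values are the test's hard-coded ones; the cylinder variant passes cylinder = extrude001 instead of an axis):

    helix001 = helix(
      axis = X,
      radius = 5,
      length = 5,
      revolutions = 1,
      angleStart = 270,
      ccw = false,
    )

Other assertions in this file's hunks show the same calls with ccw = true after the argument is toggled in the edit flow.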

Binary file not shown (image; 52 KiB before, 52 KiB after)

Binary file not shown (image; 58 KiB before, 58 KiB after)

flake.lock generated (18 lines changed)
View File

@ -20,11 +20,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1750865895,
"narHash": "sha256-p2dWAQcLVzquy9LxYCZPwyUdugw78Qv3ChvnX755qHA=",
"lastModified": 1745998881,
"narHash": "sha256-vonyYAKJSlsX4n9GCsS0pHxR6yCrfqBIuGvANlkwG6U=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "61c0f513911459945e2cb8bf333dc849f1b976ff",
"rev": "423d2df5b04b4ee7688c3d71396e872afa236a89",
"type": "github"
},
"original": {
@ -36,11 +36,11 @@
},
"nixpkgs_2": {
"locked": {
"lastModified": 1750865895,
"narHash": "sha256-p2dWAQcLVzquy9LxYCZPwyUdugw78Qv3ChvnX755qHA=",
"lastModified": 1745998881,
"narHash": "sha256-vonyYAKJSlsX4n9GCsS0pHxR6yCrfqBIuGvANlkwG6U=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "61c0f513911459945e2cb8bf333dc849f1b976ff",
"rev": "423d2df5b04b4ee7688c3d71396e872afa236a89",
"type": "github"
},
"original": {
@ -78,11 +78,11 @@
"nixpkgs": "nixpkgs_3"
},
"locked": {
"lastModified": 1750964660,
"narHash": "sha256-YQ6EyFetjH1uy5JhdhRdPe6cuNXlYpMAQePFfZj4W7M=",
"lastModified": 1745980514,
"narHash": "sha256-CITAeiuXGjDvT5iZBXr6vKVWQwsUQLJUMFO91bfJFC4=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "04f0fcfb1a50c63529805a798b4b5c21610ff390",
"rev": "7fbdae44b0f40ea432e46fd152ad8be0f8f41ad6",
"type": "github"
},
"original": {

View File

@ -125,7 +125,8 @@ test('Shows a loading spinner when uninitialized credit count', async () => {
await expect(queryByTestId('spinner')).toBeVisible()
})
const unKnownTierData = {
test('Shows the total credits for Unknown subscription', async () => {
const data = {
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 25,
@ -136,46 +137,6 @@ const unKnownTierData = {
}
}
const freeTierData = {
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "free",
}
}
const proTierData = {
// These are all ignored
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "pro",
}
}
const enterpriseTierData = {
// These are all ignored, user is part of an org.
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
// This should be ignored because the user is part of an Org.
name: "free",
}
}
test('Shows the total credits for Unknown subscription', async () => {
const data = unKnownTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
@ -205,7 +166,17 @@ test('Shows the total credits for Unknown subscription', async () => {
})
test('Progress bar reflects ratio left of Free subscription', async () => {
const data = freeTierData
const data = {
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "free",
}
}
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
@ -241,7 +212,19 @@ test('Progress bar reflects ratio left of Free subscription', async () => {
})
})
test('Shows infinite credits for Pro subscription', async () => {
const data = proTierData
const data = {
// These are all ignored
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "pro",
}
}
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
@ -272,7 +255,19 @@ test('Shows infinite credits for Pro subscription', async () => {
await expect(queryByTestId('billing-remaining-progress-bar-inline')).toBe(null)
})
test('Shows infinite credits for Enterprise subscription', async () => {
const data = enterpriseTierData
const data = {
// These are all ignored, user is part of an org.
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
// This should be ignored because the user is part of an Org.
name: "free",
}
}
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
@ -302,58 +297,3 @@ test('Shows infinite credits for Enterprise subscription', async () => {
await expect(queryByTestId('infinity')).toBeVisible()
await expect(queryByTestId('billing-remaining-progress-bar-inline')).toBe(null)
})
test('Show upgrade button if credits are not infinite', async () => {
const data = freeTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
}),
http.get('*/user/payment/subscriptions', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentSubscriptionsResponse(data.subscriptions))
}),
http.get('*/org', (req, res, ctx) => {
return new HttpResponse(403)
}),
)
const billingActor = createActor(billingMachine, { input: BILLING_CONTEXT_DEFAULTS }).start()
const { queryByTestId } = render(<BillingDialog
billingActor={billingActor}
/>)
await act(() => {
billingActor.send({ type: BillingTransition.Update, apiToken: "it doesn't matter wtf this is :)" })
})
await expect(queryByTestId('billing-upgrade-button')).toBeVisible()
})
test('Hide upgrade button if credits are infinite', async () => {
const data = enterpriseTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
}),
http.get('*/user/payment/subscriptions', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentSubscriptionsResponse(data.subscriptions))
}),
// Ok finally the first use of an org lol
http.get('*/org', (req, res, ctx) => {
return HttpResponse.json(createOrgResponse())
}),
)
const billingActor = createActor(billingMachine, { input: BILLING_CONTEXT_DEFAULTS }).start()
const { queryByTestId } = render(<BillingDialog
billingActor={billingActor}
/>)
await act(() => {
billingActor.send({ type: BillingTransition.Update, apiToken: "it doesn't matter wtf this is :)" })
})
await expect(queryByTestId('billing-upgrade-button')).toBe(null)
})

package-lock.json generated (1118 lines changed)

File diff suppressed because it is too large

View File

@ -15,14 +15,14 @@ import "car-tire.kcl" as carTire
import * from "parameters.kcl"
// Place the car rotor
carRotor
rotor = carRotor
|> translate(x = 0, y = 0.5, z = 0)
// Place the car wheel
carWheel
// Place the lug nuts
lugNut
lgnut = lugNut
|> patternCircular3d(
arcDegrees = 360,
axis = [0, 1, 0],
@ -32,8 +32,19 @@ lugNut
)
// Place the brake caliper
brakeCaliper
cal = brakeCaliper
|> translate(x = 0, y = 0.5, z = 0)
// Place the car tire
carTire
fn animate(step: number(_)) {
angle = 0.6deg
rotate(rotor, pitch = angle)
rotate(lgnut, pitch = angle)
rotate(cal, pitch = angle)
rotate(carWheel, pitch = angle)
rotate(carTire, pitch = angle)
return 0
}
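
This hunk matches the commit message ("Animate using a KCL function"): each placed part is bound to a name (rotor, lgnut, cal) so that a new animate function can rotate the whole wheel assembly by a small pitch on every step. A condensed sketch of the pattern, using only calls that appear in the hunk (how and how often the app invokes animate is not shown here, and the step parameter is left unused, as in the sample):

    // Bind a placed part to a name so it can be rotated later
    rotor = carRotor
      |> translate(x = 0, y = 0.5, z = 0)

    // Rotate every named part by the same small pitch on each step
    fn animate(step: number(_)) {
      angle = 0.6deg
      rotate(rotor, pitch = angle)
      rotate(carWheel, pitch = angle)
      rotate(carTire, pitch = angle)
      return 0
    }

The clock sample below applies the same idea, naming hourHand and minuteHand so that animate can turn them by different yaw increments.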

View File

@ -369,7 +369,7 @@ profile007 = startProfile(
|> line(%, endAbsolute = [profileStartX(%), profileStartY(%)])
|> close(%)
profile008 = circle(sketch005, center = [0, 0], diameter = nubDiameter)
subtract2d(profile007, tool = profile008)
hourHand = subtract2d(profile007, tool = profile008)
|> extrude(%, length = 5)
|> appearance(%, color = "#404040")
@ -413,7 +413,7 @@ profile009 = startProfile(
|> line(%, endAbsolute = [profileStartX(%), profileStartY(%)])
|> close(%)
profile010 = circle(sketch006, center = [0, 0], diameter = 30)
subtract2d(profile009, tool = profile010)
minuteHand = subtract2d(profile009, tool = profile010)
|> extrude(%, length = 5)
|> appearance(%, color = "#404040")
@ -439,3 +439,8 @@ profile004 = startProfile(sketch003, at = [-slotWidth / 2, 200])
|> extrude(%, length = -20)
// todo: create cavity for the screw to slide into (need csg update)
fn animate(step: number(_)) {
rotate(hourHand, yaw = -0.1deg)
return rotate(minuteHand, yaw = -0.6deg)
}

Binary file not shown (image; 84 KiB before, 84 KiB after)

rust/Cargo.lock generated (144 lines changed)
View File

@ -178,7 +178,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -189,7 +189,7 @@ checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -211,7 +211,7 @@ checksum = "e12882f59de5360c748c4cbf569a042d5fb0eb515f7bea9c1f470b47f6ffbd73"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -514,7 +514,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -740,7 +740,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -751,7 +751,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
dependencies = [
"darling_core",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -810,7 +810,7 @@ checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -831,7 +831,7 @@ dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -841,7 +841,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c"
dependencies = [
"derive_builder_core",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -906,7 +906,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -944,7 +944,7 @@ checksum = "a1ab991c1362ac86c61ab6f556cff143daa22e5a15e4e189df818b2fd19fe65b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -1119,7 +1119,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -1223,7 +1223,7 @@ dependencies = [
"inflections",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -1599,7 +1599,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -1814,7 +1814,7 @@ dependencies = [
[[package]]
name = "kcl-bumper"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"anyhow",
"clap",
@ -1825,26 +1825,26 @@ dependencies = [
[[package]]
name = "kcl-derive-docs"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
name = "kcl-directory-test-macro"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
name = "kcl-language-server"
version = "0.2.83"
version = "0.2.82"
dependencies = [
"anyhow",
"clap",
@ -1865,7 +1865,7 @@ dependencies = [
[[package]]
name = "kcl-language-server-release"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"anyhow",
"clap",
@ -1885,7 +1885,7 @@ dependencies = [
[[package]]
name = "kcl-lib"
version = "0.2.83"
version = "0.2.82"
dependencies = [
"anyhow",
"approx 0.5.1",
@ -1962,7 +1962,7 @@ dependencies = [
[[package]]
name = "kcl-python-bindings"
version = "0.3.83"
version = "0.3.82"
dependencies = [
"anyhow",
"kcl-lib",
@ -1977,7 +1977,7 @@ dependencies = [
[[package]]
name = "kcl-test-server"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"anyhow",
"hyper 0.14.32",
@ -1990,7 +1990,7 @@ dependencies = [
[[package]]
name = "kcl-to-core"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"anyhow",
"async-trait",
@ -2004,7 +2004,7 @@ dependencies = [
[[package]]
name = "kcl-wasm-lib"
version = "0.1.83"
version = "0.1.82"
dependencies = [
"anyhow",
"bson",
@ -2071,9 +2071,9 @@ dependencies = [
[[package]]
name = "kittycad-modeling-cmds"
version = "0.2.125"
version = "0.2.124"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfd09d95f8bbeb090d4d1137c9bf421eb75763f7a30e4a9e8eefa249ddf20bd3"
checksum = "221aa4670a7ad7dc8f1e4e0f9990bf3cff0a64417eb76493bafe5bbbc1f8350a"
dependencies = [
"anyhow",
"chrono",
@ -2104,7 +2104,7 @@ dependencies = [
"kittycad-modeling-cmds-macros-impl",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2115,7 +2115,7 @@ checksum = "fdb4ee23cc996aa2dca7584d410e8826e08161e1ac4335bb646d5ede33f37cb3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2311,7 +2311,7 @@ checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2640,7 +2640,7 @@ dependencies = [
"regex",
"regex-syntax 0.8.5",
"structmeta",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2654,7 +2654,7 @@ dependencies = [
"regex",
"regex-syntax 0.8.5",
"structmeta",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2710,7 +2710,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2754,7 +2754,7 @@ dependencies = [
"phf_shared",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2809,7 +2809,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2921,7 +2921,7 @@ dependencies = [
"proc-macro-error-attr2",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2981,7 +2981,7 @@ dependencies = [
"proc-macro2",
"pyo3-macros-backend",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -2994,7 +2994,7 @@ dependencies = [
"proc-macro2",
"pyo3-build-config",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3492,7 +3492,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3556,7 +3556,7 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3567,7 +3567,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3591,14 +3591,14 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
name = "serde_spanned"
version = "0.6.9"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
dependencies = [
"serde",
]
@ -3815,7 +3815,7 @@ dependencies = [
"proc-macro2",
"quote",
"structmeta-derive",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3826,7 +3826,7 @@ checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3848,7 +3848,7 @@ dependencies = [
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3891,9 +3891,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.104"
version = "2.0.103"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"
checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
dependencies = [
"proc-macro2",
"quote",
@ -3917,7 +3917,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -3941,7 +3941,7 @@ dependencies = [
"proc-macro-error2",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4050,7 +4050,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4061,7 +4061,7 @@ checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4173,7 +4173,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4217,9 +4217,9 @@ dependencies = [
[[package]]
name = "toml"
version = "0.8.23"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae"
dependencies = [
"serde",
"serde_spanned",
@ -4238,9 +4238,9 @@ dependencies = [
[[package]]
name = "toml_edit"
version = "0.22.27"
version = "0.22.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
dependencies = [
"indexmap 2.9.0",
"serde",
@ -4341,7 +4341,7 @@ checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4369,7 +4369,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4449,7 +4449,7 @@ checksum = "e9d4ed7b4c18cc150a6a0a1e9ea1ecfa688791220781af6e119f9599a8502a0a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
"termcolor",
]
@ -4635,7 +4635,7 @@ dependencies = [
"proc-macro-error2",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -4706,7 +4706,7 @@ dependencies = [
"log",
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
"wasm-bindgen-shared",
]
@ -4742,7 +4742,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@ -4777,7 +4777,7 @@ checksum = "17d5042cc5fa009658f9a7333ef24291b1291a25b6382dd68862a7f3b969f69b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -5067,7 +5067,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
"synstructure",
]
@ -5112,7 +5112,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -5123,7 +5123,7 @@ checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -5143,7 +5143,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
"synstructure",
]
@ -5164,7 +5164,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]
@ -5186,7 +5186,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
"syn 2.0.103",
]
[[package]]

View File

@ -36,7 +36,7 @@ dashmap = { version = "6.1.0" }
http = "1"
indexmap = "2.9.0"
kittycad = { version = "0.3.37", default-features = false, features = ["js", "requests"] }
kittycad-modeling-cmds = { version = "0.2.125", features = ["ts-rs", "websocket"] }
kittycad-modeling-cmds = { version = "0.2.124", features = ["ts-rs", "websocket"] }
lazy_static = "1.5.0"
miette = "7.6.0"
pyo3 = { version = "0.24.2" }
@ -61,5 +61,5 @@ result_large_err = "allow"
# Example: how to point modeling-app at a different repo (e.g. a branch or a local clone)
#[patch.crates-io]
# kittycad-modeling-cmds = { path = "../../modeling-api/modeling-cmds/" }
# kittycad-modeling-session = { path = "../../modeling-api/modeling-session" }
#kittycad-modeling-cmds = { path = "../../../modeling-api/modeling-cmds" }
#kittycad-modeling-session = { path = "../../../modeling-api/modeling-session" }

View File

@ -1,7 +1,7 @@
[package]
name = "kcl-bumper"
version = "0.1.83"
version = "0.1.82"
edition = "2021"
repository = "https://github.com/KittyCAD/modeling-api"
rust-version = "1.76"
@ -19,7 +19,7 @@ anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
semver = "1.0.25"
serde = { workspace = true }
toml_edit = "0.22.27"
toml_edit = "0.22.26"
[lints]
workspace = true

View File

@ -1,7 +1,7 @@
[package]
name = "kcl-derive-docs"
description = "A tool for generating documentation from Rust derive macros"
version = "0.1.83"
version = "0.1.82"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
@ -14,7 +14,7 @@ bench = false
[dependencies]
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.104", features = ["full"] }
syn = { version = "2.0.103", features = ["full"] }
[lints]
workspace = true

View File

@ -1,7 +1,7 @@
[package]
name = "kcl-directory-test-macro"
description = "A tool for generating tests from a directory of kcl files"
version = "0.1.83"
version = "0.1.82"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
@ -14,7 +14,7 @@ bench = false
convert_case = "0.8.0"
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.104", features = ["full"] }
syn = { version = "2.0.103", features = ["full"] }
[lints]
workspace = true

View File

@ -1,6 +1,6 @@
[package]
name = "kcl-language-server-release"
version = "0.1.83"
version = "0.1.82"
edition = "2021"
authors = ["KittyCAD Inc <kcl@kittycad.io>"]
publish = false

View File

@ -42,7 +42,7 @@ impl Build {
.to_string();
if !stable {
version = format!("{version}-nightly");
version = format!("{}-nightly", version);
}
let release_tag = if stable {
@ -59,7 +59,10 @@ impl Build {
if stable && !release_tag.contains(&version) {
// bail early if the tag doesn't match the version
// TODO: error here when we use the tags with kcl
println!("Tag {release_tag} doesn't match version {version}. Did you forget to update Cargo.toml?");
println!(
"Tag {} doesn't match version {}. Did you forget to update Cargo.toml?",
release_tag, version
);
}
build_server(sh, &version, &target)?;

View File

@ -95,10 +95,10 @@ async fn main() -> Result<()> {
// Format fields using the provided closure.
// We want to make this very concise otherwise the logs are not able to be read by humans.
let format = tracing_subscriber::fmt::format::debug_fn(|writer, field, value| {
if format!("{field}") == "message" {
write!(writer, "{field}: {value:?}")
if format!("{}", field) == "message" {
write!(writer, "{}: {:?}", field, value)
} else {
write!(writer, "{field}")
write!(writer, "{}", field)
}
})
// Separate each field with a comma.

View File

@ -2,7 +2,7 @@
name = "kcl-language-server"
description = "A language server for KCL."
authors = ["KittyCAD Inc <kcl@kittycad.io>"]
version = "0.2.83"
version = "0.2.82"
edition = "2021"
license = "MIT"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@ -123,7 +123,7 @@
"@vscode/test-electron": "^2.4.1",
"@vscode/vsce": "^3.3.2",
"cross-env": "^7.0.3",
"esbuild": "^0.25.3",
"esbuild": "^0.25.2",
"glob": "^11.0.1",
"mocha": "^11.1.0",
"typescript": "^5.8.3"

View File

@ -87,10 +87,10 @@ async fn main() -> Result<()> {
// Format fields using the provided closure.
// We want to make this very concise otherwise the logs are not able to be read by humans.
let format = tracing_subscriber::fmt::format::debug_fn(|writer, field, value| {
if format!("{field}") == "message" {
write!(writer, "{field}: {value:?}")
if format!("{}", field) == "message" {
write!(writer, "{}: {:?}", field, value)
} else {
write!(writer, "{field}")
write!(writer, "{}", field)
}
})
// Separate each field with a comma.
@ -151,7 +151,7 @@ async fn run_cmd(opts: &Opts) -> Result<()> {
tokio::spawn(async move {
if let Some(sig) = signals.forever().next() {
log::info!("received signal: {sig:?}");
log::info!("received signal: {:?}", sig);
log::info!("triggering cleanup...");
// Exit the process.

View File

@ -1,11 +1,11 @@
[package]
name = "kcl-lib"
description = "KittyCAD Language implementation and tools"
version = "0.2.83"
edition = "2024"
version = "0.2.82"
edition = "2021"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
rust-version = "1.88"
rust-version = "1.83"
authors = ["Jess Frazelle", "Adam Chalmers", "KittyCAD, Inc"]
keywords = ["kcl", "KittyCAD", "CAD"]
exclude = ["tests/*", "benches/*", "examples/*", "e2e/*", "bindings/*", "fuzz/*"]
@ -74,7 +74,7 @@ sha2 = "0.10.9"
tabled = { version = "0.20.0", optional = true }
tempfile = "3.20"
thiserror = "2.0.0"
toml = "0.8.23"
toml = "0.8.22"
ts-rs = { version = "11.0.1", features = [
"uuid-impl",
"url-impl",

View File

@ -4,7 +4,7 @@ use std::{
path::{Path, PathBuf},
};
use criterion::{Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, Criterion};
const IGNORE_DIRS: [&str; 2] = ["step", "screenshots"];
@ -61,7 +61,7 @@ fn run_benchmarks(c: &mut Criterion) {
// Read the file content (panic on failure)
let input_content = fs::read_to_string(&input_file)
.unwrap_or_else(|e| panic!("Failed to read main.kcl in directory {dir_name}: {e}"));
.unwrap_or_else(|e| panic!("Failed to read main.kcl in directory {}: {}", dir_name, e));
// Create a benchmark group for this directory
let mut group = c.benchmark_group(&dir_name);
@ -72,12 +72,12 @@ fn run_benchmarks(c: &mut Criterion) {
#[cfg(feature = "benchmark-execution")]
let program = kcl_lib::Program::parse_no_errs(&input_content).unwrap();
group.bench_function(format!("parse_{dir_name}"), |b| {
group.bench_function(format!("parse_{}", dir_name), |b| {
b.iter(|| kcl_lib::Program::parse_no_errs(black_box(&input_content)).unwrap())
});
#[cfg(feature = "benchmark-execution")]
group.bench_function(format!("execute_{dir_name}"), |b| {
group.bench_function(format!("execute_{}", dir_name), |b| {
b.iter(|| {
if let Err(err) = rt.block_on(async {
let ctx = kcl_lib::ExecutorContext::new_with_default_client().await?;
@ -86,7 +86,7 @@ fn run_benchmarks(c: &mut Criterion) {
ctx.close().await;
Ok::<(), anyhow::Error>(())
}) {
panic!("Failed to execute program: {err}");
panic!("Failed to execute program: {}", err);
}
})
});

View File

@ -1,6 +1,6 @@
use std::hint::black_box;
use criterion::{Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, Criterion};
pub fn bench_parse(c: &mut Criterion) {
for (name, file) in [

View File

@ -1,4 +1,4 @@
use criterion::{Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, Criterion};
pub fn bench_digest(c: &mut Criterion) {
for (name, file) in [

View File

@ -1,6 +1,6 @@
use std::hint::black_box;
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use kcl_lib::kcl_lsp_server;
use tokio::runtime::Runtime;
use tower_lsp::LanguageServer;

View File

@ -1,9 +1,9 @@
//! Cache testing framework.
use kcl_lib::{ExecError, ExecOutcome, bust_cache};
use kcl_lib::{bust_cache, ExecError, ExecOutcome};
#[cfg(feature = "artifact-graph")]
use kcl_lib::{NodePathStep, exec::Operation};
use kcmc::{ModelingCmd, each_cmd as mcmd};
use kcl_lib::{exec::Operation, NodePathStep};
use kcmc::{each_cmd as mcmd, ModelingCmd};
use kittycad_modeling_cmds as kcmc;
use pretty_assertions::assert_eq;
@ -38,7 +38,7 @@ async fn cache_test(
if !variation.other_files.is_empty() {
let tmp_dir = std::env::temp_dir();
let tmp_dir = tmp_dir
.join(format!("kcl_test_{test_name}"))
.join(format!("kcl_test_{}", test_name))
.join(uuid::Uuid::new_v4().to_string());
// Create a temporary file for each of the other files.
@ -56,7 +56,7 @@ async fn cache_test(
Err(error) => {
let report = error.clone().into_miette_report_with_outputs(variation.code).unwrap();
let report = miette::Report::new(report);
panic!("{report:?}");
panic!("{:?}", report);
}
};
@ -69,7 +69,7 @@ async fn cache_test(
.and_then(|x| x.decode().map_err(|e| ExecError::BadPng(e.to_string())))
.unwrap();
// Save the snapshot.
let path = crate::assert_out(&format!("cache_{test_name}_{index}"), &img);
let path = crate::assert_out(&format!("cache_{}_{}", test_name, index), &img);
img_results.push((path, img, outcome));
}
@ -337,7 +337,8 @@ extrude001 = extrude(profile001, length = 4)
// 0] as a more lenient check.
.map(|c| !c.range.is_synthetic() && c.node_path.is_empty())
.unwrap_or(false),
"artifact={artifact:?}"
"artifact={:?}",
artifact
);
}
}

View File

@ -1,8 +1,8 @@
mod cache;
use kcl_lib::{
BacktraceItem, ExecError, ModuleId, SourceRange,
test_server::{execute_and_export_step, execute_and_snapshot, execute_and_snapshot_no_auth},
BacktraceItem, ExecError, ModuleId, SourceRange,
};
/// The minimum permissible difference between asserted twenty-twenty images.
@ -869,13 +869,11 @@ async fn kcl_test_revolve_bad_angle_low() {
let result = execute_and_snapshot(code, None).await;
assert!(result.is_err());
assert!(
result
assert!(result
.err()
.unwrap()
.to_string()
.contains("Expected angle to be between -360 and 360 and not 0, found `-455`")
);
.contains("Expected angle to be between -360 and 360 and not 0, found `-455`"));
}
#[tokio::test(flavor = "multi_thread")]
@ -897,13 +895,11 @@ async fn kcl_test_revolve_bad_angle_high() {
let result = execute_and_snapshot(code, None).await;
assert!(result.is_err());
assert!(
result
assert!(result
.err()
.unwrap()
.to_string()
.contains("Expected angle to be between -360 and 360 and not 0, found `455`")
);
.contains("Expected angle to be between -360 and 360 and not 0, found `455`"));
}
#[tokio::test(flavor = "multi_thread")]
@ -2094,10 +2090,7 @@ async fn kcl_test_better_type_names() {
},
None => todo!(),
};
assert_eq!(
err,
"This function expected the input argument to be one or more Solids or ImportedGeometry but it's actually of type Sketch. You can convert a sketch (2D) into a Solid (3D) by calling a function like `extrude` or `revolve`"
);
assert_eq!(err, "This function expected the input argument to be one or more Solids or ImportedGeometry but it's actually of type Sketch. You can convert a sketch (2D) into a Solid (3D) by calling a function like `extrude` or `revolve`");
}
#[tokio::test(flavor = "multi_thread")]

View File

@ -101,7 +101,7 @@ pub trait CoreDump: Clone {
.meta()
.create_debug_uploads(vec![kittycad::types::multipart::Attachment {
name: "".to_string(),
filepath: Some(format!(r#"modeling-app/coredump-{coredump_id}.json"#).into()),
filepath: Some(format!(r#"modeling-app/coredump-{}.json"#, coredump_id).into()),
content_type: Some("application/json".to_string()),
data,
}])
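Most of the remaining hunks in this comparison differ only in this way: one side captures the variable directly in the format string (`{coredump_id}`), the other passes it positionally. Both produce identical output, as the small sketch below shows; the `uuid_placeholder` helper is hypothetical, used only to keep the example dependency-free.

fn main() {
    let coredump_id = uuid_placeholder();
    // Inline format argument (captured identifier)...
    let inline = format!(r#"modeling-app/coredump-{coredump_id}.json"#);
    // ...and the equivalent positional form produce the same string.
    let positional = format!(r#"modeling-app/coredump-{}.json"#, coredump_id);
    assert_eq!(inline, positional);
}

// Hypothetical stand-in for uuid::Uuid::new_v4().to_string().
fn uuid_placeholder() -> String {
    "00000000-0000-0000-0000-000000000000".to_owned()
}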

View File

@ -189,7 +189,7 @@ fn generate_example(index: usize, src: &str, props: &ExampleProperties, file_nam
index
);
let image_data =
std::fs::read(&image_path).unwrap_or_else(|_| panic!("Failed to read image file: {image_path}"));
std::fs::read(&image_path).unwrap_or_else(|_| panic!("Failed to read image file: {}", image_path));
base64::engine::general_purpose::STANDARD.encode(&image_data)
};
@ -225,7 +225,7 @@ fn generate_type_from_kcl(ty: &TyData, file_name: String, example_name: String,
let output = hbs.render("kclType", &data)?;
let output = cleanup_types(&output, kcl_std);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), &output);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), &output);
Ok(())
}
@ -267,7 +267,7 @@ fn generate_mod_from_kcl(m: &ModData, file_name: String) -> Result<()> {
});
let output = hbs.render("module", &data)?;
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), &output);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), &output);
Ok(())
}
@ -334,7 +334,7 @@ fn generate_function_from_kcl(
let output = hbs.render("function", &data)?;
let output = &cleanup_types(&output, kcl_std);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), output);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), output);
Ok(())
}
@ -378,7 +378,7 @@ fn generate_const_from_kcl(cnst: &ConstData, file_name: String, example_name: St
let output = hbs.render("const", &data)?;
let output = cleanup_types(&output, kcl_std);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), &output);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), &output);
Ok(())
}

View File

@ -8,7 +8,6 @@ use tower_lsp::lsp_types::{
};
use crate::{
ModuleId,
execution::annotations,
parsing::{
ast::types::{
@ -16,6 +15,7 @@ use crate::{
},
token::NumericSuffix,
},
ModuleId,
};
pub fn walk_prelude() -> ModData {
@ -97,7 +97,7 @@ fn visit_module(name: &str, preferred_prefix: &str, names: WalkForNames) -> Resu
ImportSelector::None { .. } => {
let name = import.module_name().unwrap();
if names.contains(&name) {
Some(visit_module(&path[1], &format!("{name}::"), WalkForNames::All)?)
Some(visit_module(&path[1], &format!("{}::", name), WalkForNames::All)?)
} else {
None
}
@ -451,7 +451,7 @@ impl ModData {
let (name, qual_name, module_name) = if name == "prelude" {
("std", "std".to_owned(), String::new())
} else {
(name, format!("std::{name}"), "std".to_owned())
(name, format!("std::{}", name), "std".to_owned())
};
Self {
preferred_name: format!("{preferred_prefix}{name}"),
@ -767,12 +767,14 @@ impl ArgData {
for s in &arr.elements {
let Expr::Literal(lit) = s else {
panic!(
"Invalid value in `snippetArray`, all items must be string literals but found {s:?}"
"Invalid value in `snippetArray`, all items must be string literals but found {:?}",
s
);
};
let LiteralValue::String(litstr) = &lit.inner.value else {
panic!(
"Invalid value in `snippetArray`, all items must be string literals but found {s:?}"
"Invalid value in `snippetArray`, all items must be string literals but found {:?}",
s
);
};
items.push(litstr.to_owned());
@ -814,7 +816,7 @@ impl ArgData {
}
match self.ty.as_deref() {
Some("Sketch") if self.kind == ArgKind::Special => None,
Some(s) if s.starts_with("number") => Some((index, format!(r#"{label}${{{index}:10}}"#))),
Some(s) if s.starts_with("number") => Some((index, format!(r#"{label}${{{}:10}}"#, index))),
Some("Point2d") => Some((index + 1, format!(r#"{label}[${{{}:0}}, ${{{}:0}}]"#, index, index + 1))),
Some("Point3d") => Some((
index + 2,
@ -829,7 +831,7 @@ impl ArgData {
Some("Sketch") | Some("Sketch | Helix") => Some((index, format!(r#"{label}${{{index}:sketch000}}"#))),
Some("Edge") => Some((index, format!(r#"{label}${{{index}:tag_or_edge_fn}}"#))),
Some("[Edge; 1+]") => Some((index, format!(r#"{label}[${{{index}:tag_or_edge_fn}}]"#))),
Some("Plane") | Some("Solid | Plane") => Some((index, format!(r#"{label}${{{index}:XY}}"#))),
Some("Plane") | Some("Solid | Plane") => Some((index, format!(r#"{label}${{{}:XY}}"#, index))),
Some("[TaggedFace; 2]") => Some((
index + 1,
format!(r#"{label}[${{{}:tag}}, ${{{}:tag}}]"#, index, index + 1),
@ -839,10 +841,10 @@ impl ArgData {
if self.name == "color" {
Some((index, format!(r"{label}${{{}:{}}}", index, "\"#ff0000\"")))
} else {
Some((index, format!(r#"{label}${{{index}:"string"}}"#)))
Some((index, format!(r#"{label}${{{}:"string"}}"#, index)))
}
}
Some("bool") => Some((index, format!(r#"{label}${{{index}:false}}"#))),
Some("bool") => Some((index, format!(r#"{label}${{{}:false}}"#, index))),
_ => None,
}
}
@ -1296,10 +1298,7 @@ mod test {
continue;
}
let name = format!("{}-{i}", f.qual_name.replace("::", "-"));
assert!(
TEST_NAMES.contains(&&*name),
"Missing test for example \"{name}\", maybe need to update kcl-derive-docs/src/example_tests.rs?"
)
assert!(TEST_NAMES.contains(&&*name), "Missing test for example \"{name}\", maybe need to update kcl-derive-docs/src/example_tests.rs?")
}
}
}
@ -1335,9 +1334,7 @@ mod test {
};
let Some(DocData::Fn(d)) = data.children.get(&format!("I:{qualname}")) else {
panic!(
"Could not find data for {NAME} (missing a child entry for {qualname}), maybe need to update kcl-derive-docs/src/example_tests.rs?"
);
panic!("Could not find data for {NAME} (missing a child entry for {qualname}), maybe need to update kcl-derive-docs/src/example_tests.rs?");
};
for (i, eg) in d.examples.iter().enumerate() {
@ -1365,8 +1362,6 @@ mod test {
return;
}
panic!(
"Could not find data for {NAME} (no example {number}), maybe need to update kcl-derive-docs/src/example_tests.rs?"
);
panic!("Could not find data for {NAME} (no example {number}), maybe need to update kcl-derive-docs/src/example_tests.rs?");
}
}
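The escaped braces in the snippet strings above are easy to misread: `${{{index}:10}}` renders as the LSP snippet placeholder `${1:10}` once `format!` substitutes `index` and collapses `{{`/`}}` into literal braces. A small sketch of the same escaping, using a hypothetical `placeholder` helper rather than the crate's `ArgData` API:

fn placeholder(label: &str, index: usize, default: &str) -> String {
    // `{{` and `}}` emit literal braces, so the result is LSP snippet syntax: `${N:default}`.
    format!(r#"{label}${{{index}:{default}}}"#)
}

fn main() {
    assert_eq!(placeholder("radius = ", 1, "10"), "radius = ${1:10}");
    assert_eq!(placeholder("plane = ", 2, "XY"), "plane = ${2:XY}");
}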

View File

@ -2,11 +2,11 @@
//! tasks.
use std::sync::{
Arc,
atomic::{AtomicUsize, Ordering},
Arc,
};
use tokio::sync::{Notify, mpsc};
use tokio::sync::{mpsc, Notify};
use crate::errors::KclError;

View File

@ -3,26 +3,26 @@
use std::{collections::HashMap, sync::Arc};
use anyhow::{Result, anyhow};
use anyhow::{anyhow, Result};
use futures::{SinkExt, StreamExt};
use indexmap::IndexMap;
use kcmc::{
ModelingCmd,
websocket::{
BatchResponse, FailureWebSocketResponse, ModelingCmdReq, ModelingSessionData, OkWebSocketResponseData,
SuccessWebSocketResponse, WebSocketRequest, WebSocketResponse,
},
ModelingCmd,
};
use kittycad_modeling_cmds::{self as kcmc};
use tokio::sync::{RwLock, mpsc, oneshot};
use tokio::sync::{mpsc, oneshot, RwLock};
use tokio_tungstenite::tungstenite::Message as WsMsg;
use uuid::Uuid;
use crate::{
SourceRange,
engine::{AsyncTasks, EngineManager, EngineStats},
errors::{KclError, KclErrorDetails},
execution::{DefaultPlanes, IdGenerator},
SourceRange,
};
#[derive(Debug, PartialEq)]
@ -85,7 +85,7 @@ impl TcpRead {
let msg = match msg {
Ok(msg) => msg,
Err(e) if matches!(e, tokio_tungstenite::tungstenite::Error::Protocol(_)) => {
return Err(WebSocketReadError::Read(e));
return Err(WebSocketReadError::Read(e))
}
Err(e) => return Err(anyhow::anyhow!("Error reading from engine's WebSocket: {e}").into()),
};
@ -427,7 +427,7 @@ impl EngineManager for EngineConnection {
request_sent: tx,
})
.await
.map_err(|e| KclError::new_engine(KclErrorDetails::new(format!("Failed to send debug: {e}"), vec![])))?;
.map_err(|e| KclError::new_engine(KclErrorDetails::new(format!("Failed to send debug: {}", e), vec![])))?;
let _ = rx.await;
Ok(())
@ -463,7 +463,7 @@ impl EngineManager for EngineConnection {
.await
.map_err(|e| {
KclError::new_engine(KclErrorDetails::new(
format!("Failed to send modeling command: {e}"),
format!("Failed to send modeling command: {}", e),
vec![source_range],
))
})?;
@ -533,7 +533,7 @@ impl EngineManager for EngineConnection {
}
Err(KclError::new_engine(KclErrorDetails::new(
format!("Modeling command timed out `{id}`"),
format!("Modeling command timed out `{}`", id),
vec![source_range],
)))
}

View File

@ -12,16 +12,16 @@ use kcmc::{
WebSocketResponse,
},
};
use kittycad_modeling_cmds::{self as kcmc, ImportFiles, ModelingCmd, websocket::ModelingCmdReq};
use kittycad_modeling_cmds::{self as kcmc, websocket::ModelingCmdReq, ImportFiles, ModelingCmd};
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::{
SourceRange,
engine::{AsyncTasks, EngineStats},
errors::KclError,
exec::DefaultPlanes,
execution::IdGenerator,
SourceRange,
};
#[derive(Debug, Clone)]

View File

@ -11,10 +11,10 @@ use uuid::Uuid;
use wasm_bindgen::prelude::*;
use crate::{
SourceRange,
engine::{AsyncTasks, EngineStats},
errors::{KclError, KclErrorDetails},
execution::{DefaultPlanes, IdGenerator},
SourceRange,
};
#[wasm_bindgen(module = "/../../src/lang/std/engineConnection.ts")]

View File

@ -12,15 +12,15 @@ pub mod conn_wasm;
use std::{
collections::HashMap,
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
Arc,
},
};
pub use async_tasks::AsyncTasks;
use indexmap::IndexMap;
use kcmc::{
ModelingCmd, each_cmd as mcmd,
each_cmd as mcmd,
length_unit::LengthUnit,
ok_response::OkModelingCmdResponse,
shared::Color,
@ -28,6 +28,7 @@ use kcmc::{
BatchResponse, ModelingBatch, ModelingCmdReq, ModelingSessionData, OkWebSocketResponseData, WebSocketRequest,
WebSocketResponse,
},
ModelingCmd,
};
use kittycad_modeling_cmds as kcmc;
use parse_display::{Display, FromStr};
@ -38,9 +39,9 @@ use uuid::Uuid;
use web_time::Instant;
use crate::{
SourceRange,
errors::{KclError, KclErrorDetails},
execution::{DefaultPlanes, IdGenerator, PlaneInfo, Point3d, types::UnitLen},
execution::{types::UnitLen, DefaultPlanes, IdGenerator, PlaneInfo, Point3d},
SourceRange,
};
lazy_static::lazy_static! {
@ -290,10 +291,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
// the artifact graph won't care either if its gone since you can't select it
// anymore anyways.
if let Err(err) = self.async_tasks().join_all().await {
crate::log::logln!(
"Error waiting for async tasks (this is typically fine and just means that an edge became something else): {:?}",
err
);
crate::log::logln!("Error waiting for async tasks (this is typically fine and just means that an edge became something else): {:?}", err);
}
// Flush the batch to make sure nothing remains.
@ -501,7 +499,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
}
_ => {
return Err(KclError::new_engine(KclErrorDetails::new(
format!("The request is not a modeling command: {req:?}"),
format!("The request is not a modeling command: {:?}", req),
vec![*range],
)));
}
@ -531,7 +529,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
} else {
// We should never get here.
Err(KclError::new_engine(KclErrorDetails::new(
format!("Failed to get batch response: {response:?}"),
format!("Failed to get batch response: {:?}", response),
vec![source_range],
)))
}
@ -546,7 +544,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
// an error.
let source_range = id_to_source_range.get(cmd_id.as_ref()).cloned().ok_or_else(|| {
KclError::new_engine(KclErrorDetails::new(
format!("Failed to get source range for command ID: {cmd_id:?}"),
format!("Failed to get source range for command ID: {:?}", cmd_id),
vec![],
))
})?;
@ -556,7 +554,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
self.parse_websocket_response(ws_resp, source_range)
}
_ => Err(KclError::new_engine(KclErrorDetails::new(
format!("The final request is not a modeling command: {final_req:?}"),
format!("The final request is not a modeling command: {:?}", final_req),
vec![source_range],
))),
}
@ -665,7 +663,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
let info = DEFAULT_PLANE_INFO.get(&name).ok_or_else(|| {
// We should never get here.
KclError::new_engine(KclErrorDetails::new(
format!("Failed to get default plane info for: {name:?}"),
format!("Failed to get default plane info for: {:?}", name),
vec![source_range],
))
})?;
@ -741,7 +739,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
// Get the source range for the command.
let source_range = id_to_source_range.get(cmd_id).cloned().ok_or_else(|| {
KclError::new_engine(KclErrorDetails::new(
format!("Failed to get source range for command ID: {cmd_id:?}"),
format!("Failed to get source range for command ID: {:?}", cmd_id),
vec![],
))
})?;
@ -756,7 +754,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
// Return an error that we did not get an error or the response we wanted.
// This should never happen but who knows.
Err(KclError::new_engine(KclErrorDetails::new(
format!("Failed to find response for command ID: {id:?}"),
format!("Failed to find response for command ID: {:?}", id),
vec![],
)))
}

View File

@ -7,11 +7,11 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
#[cfg(feature = "artifact-graph")]
use crate::execution::{ArtifactCommand, ArtifactGraph, Operation};
use crate::{
ModuleId,
execution::DefaultPlanes,
lsp::IntoDiagnostic,
modules::{ModulePath, ModuleSource},
source_range::SourceRange,
ModuleId,
};
/// How did the KCL execution fail

View File

@ -2,13 +2,13 @@
use std::str::FromStr;
use kittycad_modeling_cmds::coord::{KITTYCAD, OPENGL, System, VULKAN};
use kittycad_modeling_cmds::coord::{System, KITTYCAD, OPENGL, VULKAN};
use crate::{
KclError, SourceRange,
errors::KclErrorDetails,
execution::types::{UnitAngle, UnitLen},
parsing::ast::types::{Annotation, Expr, LiteralValue, Node, ObjectProperty},
KclError, SourceRange,
};
/// Annotations which should cause re-execution if they change.

View File

@ -1,19 +1,20 @@
use fnv::FnvHashMap;
use indexmap::IndexMap;
use kittycad_modeling_cmds::{
self as kcmc, EnableSketchMode, ModelingCmd,
self as kcmc,
ok_response::OkModelingCmdResponse,
shared::ExtrusionFaceCapType,
websocket::{BatchResponse, OkWebSocketResponseData, WebSocketResponse},
EnableSketchMode, ModelingCmd,
};
use serde::{Serialize, ser::SerializeSeq};
use serde::{ser::SerializeSeq, Serialize};
use uuid::Uuid;
use crate::{
KclError, NodePath, SourceRange,
errors::KclErrorDetails,
execution::ArtifactId,
parsing::ast::types::{Node, Program},
KclError, NodePath, SourceRange,
};
#[cfg(test)]
@ -892,10 +893,7 @@ fn artifacts_to_update(
),
};
if original_path_ids.len() != face_edge_infos.len() {
internal_error!(
range,
"EntityMirror or EntityMirrorAcrossEdge response has different number face edge info than original mirrored paths: id={id:?}, cmd={cmd:?}, response={response:?}"
);
internal_error!(range, "EntityMirror or EntityMirrorAcrossEdge response has different number face edge info than original mirrored paths: id={id:?}, cmd={cmd:?}, response={response:?}");
}
let mut return_arr = Vec::new();
for (face_edge_info, original_path_id) in face_edge_infos.iter().zip(original_path_ids) {
@ -911,10 +909,7 @@ fn artifacts_to_update(
// of its info.
let Some(Artifact::Path(original_path)) = artifacts.get(&original_path_id) else {
// We couldn't find the original path. This is a bug.
internal_error!(
range,
"Couldn't find original path for mirror2d: original_path_id={original_path_id:?}, cmd={cmd:?}"
);
internal_error!(range, "Couldn't find original path for mirror2d: original_path_id={original_path_id:?}, cmd={cmd:?}");
};
Path {
id: path_id,

View File

@ -268,7 +268,7 @@ impl ArtifactGraph {
for (group_id, artifact_ids) in groups {
let group_id = *stable_id_map.get(&group_id).unwrap();
writeln!(output, "{prefix}subgraph path{group_id} [Path]")?;
let indented = format!("{prefix} ");
let indented = format!("{} ", prefix);
for artifact_id in artifact_ids {
let artifact = self.map.get(&artifact_id).unwrap();
let id = *stable_id_map.get(&artifact_id).unwrap();
@ -353,7 +353,7 @@ impl ArtifactGraph {
node_path_display(output, prefix, None, &segment.code_ref)?;
}
Artifact::Solid2d(_solid2d) => {
writeln!(output, "{prefix}{id}[Solid2d]")?;
writeln!(output, "{prefix}{}[Solid2d]", id)?;
}
Artifact::StartSketchOnFace(StartSketchOnFace { code_ref, .. }) => {
writeln!(
@ -494,24 +494,24 @@ impl ArtifactGraph {
match edge.flow {
EdgeFlow::SourceToTarget => match edge.direction {
EdgeDirection::Forward => {
writeln!(output, "{prefix}{source_id} x{extra}--> {target_id}")?;
writeln!(output, "{prefix}{source_id} x{}--> {}", extra, target_id)?;
}
EdgeDirection::Backward => {
writeln!(output, "{prefix}{source_id} <{extra}--x {target_id}")?;
writeln!(output, "{prefix}{source_id} <{}--x {}", extra, target_id)?;
}
EdgeDirection::Bidirectional => {
writeln!(output, "{prefix}{source_id} {extra}--- {target_id}")?;
writeln!(output, "{prefix}{source_id} {}--- {}", extra, target_id)?;
}
},
EdgeFlow::TargetToSource => match edge.direction {
EdgeDirection::Forward => {
writeln!(output, "{prefix}{target_id} x{extra}--> {source_id}")?;
writeln!(output, "{prefix}{target_id} x{}--> {}", extra, source_id)?;
}
EdgeDirection::Backward => {
writeln!(output, "{prefix}{target_id} <{extra}--x {source_id}")?;
writeln!(output, "{prefix}{target_id} <{}--x {}", extra, source_id)?;
}
EdgeDirection::Bidirectional => {
writeln!(output, "{prefix}{target_id} {extra}--- {source_id}")?;
writeln!(output, "{prefix}{target_id} {}--- {}", extra, source_id)?;
}
},
}

View File

@ -6,14 +6,15 @@ use itertools::{EitherOrBoth, Itertools};
use tokio::sync::RwLock;
use crate::{
ExecOutcome, ExecutorContext,
execution::{
EnvironmentRef, ExecutorSettings, annotations,
annotations,
memory::Stack,
state::{self as exec_state, ModuleInfoMap},
EnvironmentRef, ExecutorSettings,
},
parsing::ast::types::{Annotation, Node, Program},
walk::Node as WalkNode,
ExecOutcome, ExecutorContext,
};
lazy_static::lazy_static! {
@ -336,7 +337,7 @@ mod tests {
use pretty_assertions::assert_eq;
use super::*;
use crate::execution::{ExecTestResults, parse_execute, parse_execute_with_project_dir};
use crate::execution::{parse_execute, parse_execute_with_project_dir, ExecTestResults};
#[tokio::test(flavor = "multi_thread")]
async fn test_get_changed_program_same_code() {
@ -754,7 +755,7 @@ extrude(profile001, length = 100)"#
.await;
let CacheResult::CheckImportsOnly { reapply_settings, .. } = result else {
panic!("Expected CheckImportsOnly, got {result:?}");
panic!("Expected CheckImportsOnly, got {:?}", result);
};
assert_eq!(reapply_settings, false);
@ -838,7 +839,7 @@ extrude(profile001, length = 100)
.await;
let CacheResult::CheckImportsOnly { reapply_settings, .. } = result else {
panic!("Expected CheckImportsOnly, got {result:?}");
panic!("Expected CheckImportsOnly, got {:?}", result);
};
assert_eq!(reapply_settings, false);

View File

@ -1,10 +1,10 @@
use indexmap::IndexMap;
use serde::Serialize;
use super::{ArtifactId, KclValue, types::NumericType};
use super::{types::NumericType, ArtifactId, KclValue};
#[cfg(feature = "artifact-graph")]
use crate::parsing::ast::types::{Node, Program};
use crate::{ModuleId, NodePath, SourceRange, parsing::ast::types::ItemVisibility};
use crate::{parsing::ast::types::ItemVisibility, ModuleId, NodePath, SourceRange};
/// A CAD modeling operation for display in the feature tree, AKA operations
/// timeline.
@ -57,7 +57,7 @@ impl Operation {
/// If the variant is `StdLibCall`, set the `is_error` field.
pub(crate) fn set_std_lib_call_is_error(&mut self, is_err: bool) {
match self {
Self::StdLibCall { is_error, .. } => *is_error = is_err,
Self::StdLibCall { ref mut is_error, .. } => *is_error = is_err,
Self::VariableDeclaration { .. } | Self::GroupBegin { .. } | Self::GroupEnd => {}
}
}
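The only difference in this hunk is the explicit `ref mut` binding: with Rust's match ergonomics, matching a struct variant through `&mut self` already binds the field as `&mut bool`, so both sides behave identically. A minimal sketch of the same pattern on a made-up enum (not the crate's actual `Operation` type):

enum Op {
    StdLibCall { is_error: bool },
    Other,
}

impl Op {
    fn set_is_error(&mut self, is_err: bool) {
        match self {
            // `is_error` binds as `&mut bool` here even without an explicit `ref mut`.
            Op::StdLibCall { is_error } => *is_error = is_err,
            Op::Other => {}
        }
    }
}

fn main() {
    let mut op = Op::StdLibCall { is_error: false };
    op.set_is_error(true);
    assert!(matches!(op, Op::StdLibCall { is_error: true }));
}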

View File

@ -3,31 +3,29 @@ use std::collections::HashMap;
use async_recursion::async_recursion;
use crate::{
CompilationError, NodePath,
errors::{KclError, KclErrorDetails},
execution::{
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, ModelingCmdMeta, ModuleArtifactState,
Operation, PlaneType, StatementKind, TagIdentifier, annotations,
annotations,
cad_op::OpKclValue,
fn_call::Args,
kcl_value::{FunctionSource, TypeDef},
memory,
state::ModuleState,
types::{NumericType, PrimitiveType, RuntimeType},
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, ModelingCmdMeta, ModuleArtifactState,
Operation, PlaneType, StatementKind, TagIdentifier,
},
fmt,
modules::{ModuleId, ModulePath, ModuleRepr},
parsing::{
ast::types::{
parsing::ast::types::{
Annotation, ArrayExpression, ArrayRangeExpression, AscribedExpression, BinaryExpression, BinaryOperator,
BinaryPart, BodyItem, Expr, IfExpression, ImportPath, ImportSelector, ItemVisibility, LiteralIdentifier,
LiteralValue, MemberExpression, Name, Node, NodeRef, ObjectExpression, PipeExpression, Program,
TagDeclarator, Type, UnaryExpression, UnaryOperator,
},
token::NumericSuffix,
LiteralValue, MemberExpression, Name, Node, NodeRef, ObjectExpression, PipeExpression, Program, TagDeclarator,
Type, UnaryExpression, UnaryOperator,
},
source_range::SourceRange,
std::args::TyF64,
CompilationError, NodePath,
};
impl<'a> StatementKind<'a> {
@ -197,23 +195,19 @@ impl ExecutorContext {
}
if ty.is_ok() && !module_exports.contains(&ty_name) {
ty = Err(KclError::new_semantic(KclErrorDetails::new(
format!(
ty = Err(KclError::new_semantic(KclErrorDetails::new(format!(
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
import_item.name.name
),
vec![SourceRange::from(&import_item.name)],
)));
vec![SourceRange::from(&import_item.name)],)));
}
if mod_value.is_ok() && !module_exports.contains(&mod_name) {
mod_value = Err(KclError::new_semantic(KclErrorDetails::new(
format!(
mod_value = Err(KclError::new_semantic(KclErrorDetails::new(format!(
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
import_item.name.name
),
vec![SourceRange::from(&import_item.name)],
)));
vec![SourceRange::from(&import_item.name)],)));
}
if value.is_err() && ty.is_err() && mod_value.is_err() {
@ -273,7 +267,7 @@ impl ExecutorContext {
.get_from(name, env_ref, source_range, 0)
.map_err(|_err| {
KclError::new_internal(KclErrorDetails::new(
format!("{name} is not defined in module (but was exported?)"),
format!("{} is not defined in module (but was exported?)", name),
vec![source_range],
))
})?
@ -434,7 +428,7 @@ impl ExecutorContext {
return Err(KclError::new_semantic(KclErrorDetails::new(
"User-defined types are not yet supported.".to_owned(),
vec![metadata.source_range],
)));
)))
}
},
}
@ -795,12 +789,11 @@ fn var_in_own_ref_err(e: KclError, being_declared: &Option<String>) -> KclError
// TODO after June 26th: replace this with a let-chain,
// which will be available in Rust 1.88
// https://rust-lang.github.io/rfcs/2497-if-let-chains.html
if let (Some(name0), Some(name1)) = (&being_declared, &name)
&& name0 == name1
{
details.message = format!(
"You can't use `{name0}` because you're currently trying to define it. Use a different variable here instead."
);
match (&being_declared, &name) {
(Some(name0), Some(name1)) if name0 == name1 => {
details.message = format!("You can't use `{name0}` because you're currently trying to define it. Use a different variable here instead.");
}
_ => {}
}
KclError::UndefinedValue { details, name }
}
@ -1081,7 +1074,7 @@ impl Node<BinaryExpression> {
(&left_value, &right_value)
{
return Ok(KclValue::String {
value: format!("{left}{right}"),
value: format!("{}{}", left, right),
meta,
});
}
@ -1241,9 +1234,7 @@ impl Node<BinaryExpression> {
exec_state.clear_units_warnings(&sr);
let mut err = CompilationError::err(
sr,
format!(
"{verb} numbers which have unknown or incompatible units.\nYou can probably fix this error by specifying the units using type ascription, e.g., `len: number(mm)` or `(a * b): number(deg)`."
),
format!("{} numbers which have unknown or incompatible units.\nYou can probably fix this error by specifying the units using type ascription, e.g., `len: number(mm)` or `(a * b): number(deg)`.", verb),
);
err.tag = crate::errors::Tag::UnknownNumericUnits;
exec_state.warn(err);
@ -1423,7 +1414,7 @@ async fn inner_execute_pipe_body(
for expression in body {
if let Expr::TagDeclarator(_) = expression {
return Err(KclError::new_semantic(KclErrorDetails::new(
format!("This cannot be in a PipeExpression: {expression:?}"),
format!("This cannot be in a PipeExpression: {:?}", expression),
vec![expression.into()],
)));
}
@ -1675,18 +1666,12 @@ impl Property {
LiteralIdentifier::Literal(literal) => {
let value = literal.value.clone();
match value {
n @ LiteralValue::Number { value, suffix } => {
if !matches!(suffix, NumericSuffix::None | NumericSuffix::Count) {
return Err(KclError::new_semantic(KclErrorDetails::new(
format!("{n} is not a valid index, indices must be non-dimensional numbers"),
property_sr,
)));
}
LiteralValue::Number { value, .. } => {
if let Some(x) = crate::try_f64_to_usize(value) {
Ok(Property::UInt(x))
} else {
Err(KclError::new_semantic(KclErrorDetails::new(
format!("{n} is not a valid index, indices must be whole numbers >= 0"),
format!("{value} is not a valid index, indices must be whole numbers >= 0"),
property_sr,
)))
}
@ -1705,33 +1690,22 @@ fn jvalue_to_prop(value: &KclValue, property_sr: Vec<SourceRange>, name: &str) -
let make_err =
|message: String| Err::<Property, _>(KclError::new_semantic(KclErrorDetails::new(message, property_sr)));
match value {
n @ KclValue::Number { value: num, ty, .. } => {
if !matches!(
ty,
NumericType::Known(crate::exec::UnitType::Count) | NumericType::Default { .. } | NumericType::Any
) {
return make_err(format!(
"arrays can only be indexed by non-dimensioned numbers, found {}",
n.human_friendly_type()
));
}
KclValue::Number{value: num, .. } => {
let num = *num;
if num < 0.0 {
return make_err(format!("'{num}' is negative, so you can't index an array with it"));
return make_err(format!("'{num}' is negative, so you can't index an array with it"))
}
let nearest_int = crate::try_f64_to_usize(num);
if let Some(nearest_int) = nearest_int {
Ok(Property::UInt(nearest_int))
} else {
make_err(format!(
"'{num}' is not an integer, so you can't index an array with it"
))
make_err(format!("'{num}' is not an integer, so you can't index an array with it"))
}
}
KclValue::String{value: x, meta:_} => Ok(Property::String(x.to_owned())),
_ => make_err(format!(
"{name} is not a valid property/index, you can only use a string to get the property of an object, or an int (>= 0) to get an item in an array"
)),
_ => {
make_err(format!("{name} is not a valid property/index, you can only use a string to get the property of an object, or an int (>= 0) to get an item in an array"))
}
}
}
@ -1759,9 +1733,9 @@ mod test {
use super::*;
use crate::{
ExecutorSettings, UnitLen,
exec::UnitType,
execution::{ContextType, parse_execute},
execution::{parse_execute, ContextType},
ExecutorSettings, UnitLen,
};
#[tokio::test(flavor = "multi_thread")]
@ -1791,7 +1765,7 @@ arr1 = [42]: [number(cm)]
.get_from("arr1", result.mem_env, SourceRange::default(), 0)
.unwrap();
if let KclValue::HomArray { value, ty } = arr1 {
assert_eq!(value.len(), 1, "Expected Vec with specific length: found {value:?}");
assert_eq!(value.len(), 1, "Expected Vec with specific length: found {:?}", value);
assert_eq!(*ty, RuntimeType::known_length(UnitLen::Cm));
// Compare, ignoring meta.
if let KclValue::Number { value, ty, .. } = &value[0] {
@ -1960,7 +1934,7 @@ d = b + c
.await
.map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to create mock engine connection: {err}"),
format!("Failed to create mock engine connection: {}", err),
vec![SourceRange::default()],
))
})
@ -2167,23 +2141,4 @@ c = ((PI * 2) / 3): number(deg)
let result = parse_execute(ast).await.unwrap();
assert_eq!(result.exec_state.errors().len(), 2);
}
#[tokio::test(flavor = "multi_thread")]
async fn non_count_indexing() {
let ast = r#"x = [0, 0]
y = x[1mm]
"#;
parse_execute(ast).await.unwrap_err();
let ast = r#"x = [0, 0]
y = 1deg
z = x[y]
"#;
parse_execute(ast).await.unwrap_err();
let ast = r#"x = [0, 0]
y = x[0mm + 1]
"#;
parse_execute(ast).await.unwrap_err();
}
}

View File

@ -2,19 +2,19 @@ use async_recursion::async_recursion;
use indexmap::IndexMap;
use crate::{
CompilationError, NodePath,
errors::{KclError, KclErrorDetails},
execution::{
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, StatementKind, TagEngineInfo,
TagIdentifier,
cad_op::{Group, OpArg, OpKclValue, Operation},
kcl_value::FunctionSource,
memory,
types::RuntimeType,
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, StatementKind, TagEngineInfo,
TagIdentifier,
},
parsing::ast::types::{CallExpressionKw, DefaultParamVal, FunctionExpression, Node, Program, Type},
source_range::SourceRange,
std::StdFn,
CompilationError, NodePath,
};
#[derive(Debug, Clone)]
@ -269,7 +269,7 @@ impl Node<CallExpressionKw> {
};
KclError::new_undefined_value(
KclErrorDetails::new(
format!("Result of user-defined function {fn_name} is undefined"),
format!("Result of user-defined function {} is undefined", fn_name),
source_ranges,
),
None,
@ -401,7 +401,7 @@ impl FunctionDefinition<'_> {
impl FunctionBody<'_> {
fn prep_mem(&self, exec_state: &mut ExecState) {
match self {
FunctionBody::Rust(_) => exec_state.mut_stack().push_new_root_env(true),
FunctionBody::Rust(_) => exec_state.mut_stack().push_new_env_for_rust_call(),
FunctionBody::Kcl(_, memory) => exec_state.mut_stack().push_new_env_for_call(*memory),
}
}
@ -445,7 +445,7 @@ fn update_memory_for_tags_of_geometry(result: &mut KclValue, exec_state: &mut Ex
}
}
}
KclValue::Solid { value } => {
KclValue::Solid { ref mut value } => {
for v in &value.value {
if let Some(tag) = v.get_tag() {
// Get the past tag and update it.
@ -555,9 +555,9 @@ fn type_err_str(expected: &Type, found: &KclValue, source_range: &SourceRange, e
let found_human = found.human_friendly_type();
let found_ty = found.principal_type_string();
let found_str = if found_human == found_ty || found_human == format!("a {}", strip_backticks(&found_ty)) {
format!("a value with type {found_ty}")
format!("a value with type {}", found_ty)
} else {
format!("{found_human} (with type {found_ty})")
format!("{found_human} (with type {})", found_ty)
};
let mut result = format!("{expected_str}, but found {found_str}.");
@ -626,7 +626,7 @@ fn type_check_params_kw(
format!(
"`{label}` is not an argument of {}",
fn_name
.map(|n| format!("`{n}`"))
.map(|n| format!("`{}`", n))
.unwrap_or_else(|| "this function".to_owned()),
),
));
@ -676,7 +676,7 @@ fn type_check_params_kw(
format!(
"The input argument of {} requires {}",
fn_name
.map(|n| format!("`{n}`"))
.map(|n| format!("`{}`", n))
.unwrap_or_else(|| "this function".to_owned()),
type_err_str(ty, &arg.1.value, &arg.1.source_range, exec_state),
),
@ -691,7 +691,7 @@ fn type_check_params_kw(
format!(
"{} expects an unlabeled first argument (`@{name}`), but it is labelled in the call",
fn_name
.map(|n| format!("The function `{n}`"))
.map(|n| format!("The function `{}`", n))
.unwrap_or_else(|| "This function".to_owned()),
),
));
@ -721,7 +721,7 @@ fn assign_args_to_params_kw(
)?;
}
None => match default {
Some(default_val) => {
Some(ref default_val) => {
let value = KclValue::from_default_param(default_val.clone(), exec_state);
exec_state
.mut_stack()
@ -729,7 +729,10 @@ fn assign_args_to_params_kw(
}
None => {
return Err(KclError::new_semantic(KclErrorDetails::new(
format!("This function requires a parameter {name}, but you haven't passed it one."),
format!(
"This function requires a parameter {}, but you haven't passed it one.",
name
),
source_ranges,
)));
}
@ -743,9 +746,7 @@ fn assign_args_to_params_kw(
let Some(unlabeled) = unlabelled else {
return Err(if args.kw_args.labeled.contains_key(param_name) {
KclError::new_semantic(KclErrorDetails::new(
format!(
"The function does declare a parameter named '{param_name}', but this parameter doesn't use a label. Try removing the `{param_name}:`"
),
format!("The function does declare a parameter named '{param_name}', but this parameter doesn't use a label. Try removing the `{param_name}:`"),
source_ranges,
))
} else {
@ -798,7 +799,7 @@ mod test {
use super::*;
use crate::{
execution::{ContextType, memory::Stack, parse_execute, types::NumericType},
execution::{memory::Stack, parse_execute, types::NumericType, ContextType},
parsing::ast::types::{DefaultParamVal, Identifier, Parameter},
};

View File

@ -3,16 +3,16 @@ use std::ops::{Add, AddAssign, Mul};
use anyhow::Result;
use indexmap::IndexMap;
use kittycad_modeling_cmds as kcmc;
use kittycad_modeling_cmds::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit, websocket::ModelingCmdReq};
use kittycad_modeling_cmds::{each_cmd as mcmd, length_unit::LengthUnit, websocket::ModelingCmdReq, ModelingCmd};
use parse_display::{Display, FromStr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{
engine::{DEFAULT_PLANE_INFO, PlaneName},
engine::{PlaneName, DEFAULT_PLANE_INFO},
errors::{KclError, KclErrorDetails},
execution::{
ArtifactId, ExecState, ExecutorContext, Metadata, TagEngineInfo, TagIdentifier, UnitLen, types::NumericType,
types::NumericType, ArtifactId, ExecState, ExecutorContext, Metadata, TagEngineInfo, TagIdentifier, UnitLen,
},
parsing::ast::types::{Node, NodeRef, TagDeclarator, TagNode},
std::{args::TyF64, sketch::PlaneData},
@ -472,7 +472,7 @@ impl TryFrom<PlaneData> for PlaneInfo {
PlaneData::Plane(_) => {
// We will never get here since we already checked for PlaneData::Plane.
return Err(KclError::new_internal(KclErrorDetails::new(
format!("PlaneData {value:?} not found"),
format!("PlaneData {:?} not found", value),
Default::default(),
)));
}
@ -480,7 +480,7 @@ impl TryFrom<PlaneData> for PlaneInfo {
let info = DEFAULT_PLANE_INFO.get(&name).ok_or_else(|| {
KclError::new_internal(KclErrorDetails::new(
format!("Plane {name} not found"),
format!("Plane {} not found", name),
Default::default(),
))
})?;
@ -815,8 +815,8 @@ impl EdgeCut {
pub fn set_id(&mut self, id: uuid::Uuid) {
match self {
EdgeCut::Fillet { id: i, .. } => *i = id,
EdgeCut::Chamfer { id: i, .. } => *i = id,
EdgeCut::Fillet { id: ref mut i, .. } => *i = id,
EdgeCut::Chamfer { id: ref mut i, .. } => *i = id,
}
}
@ -829,8 +829,8 @@ impl EdgeCut {
pub fn set_edge_id(&mut self, id: uuid::Uuid) {
match self {
EdgeCut::Fillet { edge_id: i, .. } => *i = id,
EdgeCut::Chamfer { edge_id: i, .. } => *i = id,
EdgeCut::Fillet { edge_id: ref mut i, .. } => *i = id,
EdgeCut::Chamfer { edge_id: ref mut i, .. } => *i = id,
}
}

View File

@ -2,12 +2,12 @@ use std::str::FromStr;
use anyhow::Result;
use kcmc::{
ImportFile, ModelingCmd,
coord::{KITTYCAD, System},
coord::{System, KITTYCAD},
each_cmd as mcmd,
format::InputFormat3d,
shared::FileImportFormat,
units::UnitLength,
ImportFile, ModelingCmd,
};
use kittycad_modeling_cmds as kcmc;
use serde::{Deserialize, Serialize};
@ -16,8 +16,8 @@ use uuid::Uuid;
use crate::{
errors::{KclError, KclErrorDetails},
execution::{
ExecState, ExecutorContext, ImportedGeometry, ModelingCmdMeta, annotations, typed_path::TypedPath,
types::UnitLen,
annotations, typed_path::TypedPath, types::UnitLen, ExecState, ExecutorContext, ImportedGeometry,
ModelingCmdMeta,
},
fs::FileSystem,
parsing::ast::types::{Annotation, Node},
@ -184,7 +184,7 @@ pub(super) fn format_from_annotations(
annotations::IMPORT_LENGTH_UNIT
),
vec![p.as_source_range()],
)));
)))
}
}
}
@ -225,7 +225,7 @@ fn set_coords(fmt: &mut InputFormat3d, coords_str: &str, source_range: SourceRan
annotations::IMPORT_COORDS
),
vec![source_range],
)));
)))
}
}
@ -246,7 +246,7 @@ fn set_length_unit(fmt: &mut InputFormat3d, units_str: &str, source_range: Sourc
annotations::IMPORT_LENGTH_UNIT
),
vec![source_range],
)));
)))
}
}
@ -291,9 +291,7 @@ fn get_import_format_from_extension(ext: &str) -> Result<InputFormat3d> {
} else if ext == "glb" {
FileImportFormat::Gltf
} else {
anyhow::bail!(
"unknown source format for file extension: {ext}. Try setting the `--src-format` flag explicitly or use a valid format."
)
anyhow::bail!("unknown source format for file extension: {ext}. Try setting the `--src-format` flag explicitly or use a valid format.")
}
}
};

View File

@ -6,12 +6,12 @@ use std::{
use anyhow::Result;
use crate::{
ExecState, ExecutorContext, KclError, ModuleId, SourceRange,
errors::KclErrorDetails,
execution::typed_path::TypedPath,
modules::{ModulePath, ModuleRepr},
parsing::ast::types::{ImportPath, ImportStatement, Node as AstNode},
walk::{Node, Visitable},
ExecState, ExecutorContext, KclError, ModuleId, SourceRange,
};
/// Specific dependency between two modules. The 0th element of this info
@ -147,7 +147,7 @@ fn import_dependencies(
ret.lock()
.map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to lock mutex: {err}"),
format!("Failed to lock mutex: {}", err),
Default::default(),
))
})?
@ -157,7 +157,7 @@ fn import_dependencies(
ret.lock()
.map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to lock mutex: {err}"),
format!("Failed to lock mutex: {}", err),
Default::default(),
))
})?
@ -179,7 +179,7 @@ fn import_dependencies(
let ret = ret.lock().map_err(|err| {
KclError::new_internal(KclErrorDetails::new(
format!("Failed to lock mutex: {err}"),
format!("Failed to lock mutex: {}", err),
Default::default(),
))
})?;
@ -224,7 +224,7 @@ pub(crate) async fn import_universe(
let repr = {
let Some(module_info) = exec_state.get_module(module_id) else {
return Err(KclError::new_internal(KclErrorDetails::new(
format!("Module {module_id} not found"),
format!("Module {} not found", module_id),
vec![import_stmt.into()],
)));
};
@ -244,7 +244,9 @@ mod tests {
use crate::parsing::ast::types::{ImportSelector, Program};
macro_rules! kcl {
( $kcl:expr_2021 ) => {{ $crate::parsing::top_level_parse($kcl).unwrap() }};
( $kcl:expr ) => {{
$crate::parsing::top_level_parse($kcl).unwrap()
}};
}
fn into_module_info(program: AstNode<Program>) -> DependencyInfo {
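The `Failed to lock mutex` hunks above all follow the same shape: convert a `Mutex` poisoning error into the crate's own error type with `map_err`. Below is a dependency-free sketch of that shape; using `String` as the error type instead of `KclError` is an assumption made purely to keep the example self-contained.

use std::sync::Mutex;

fn push_value(shared: &Mutex<Vec<i32>>, value: i32) -> Result<(), String> {
    let mut guard = shared
        .lock()
        // PoisonError implements Display, so it can be formatted into a message.
        .map_err(|err| format!("Failed to lock mutex: {err}"))?;
    guard.push(value);
    Ok(())
}

fn main() {
    let shared = Mutex::new(Vec::new());
    push_value(&shared, 7).unwrap();
    assert_eq!(*shared.lock().unwrap(), vec![7]);
}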

View File

@ -5,18 +5,18 @@ use schemars::JsonSchema;
use serde::Serialize;
use crate::{
CompilationError, KclError, ModuleId, SourceRange,
errors::KclErrorDetails,
execution::{
EnvironmentRef, ExecState, Face, Geometry, GeometryWithImportedGeometry, Helix, ImportedGeometry, MetaSettings,
Metadata, Plane, Sketch, Solid, TagIdentifier,
annotations::{SETTINGS, SETTINGS_UNIT_LENGTH},
types::{NumericType, PrimitiveType, RuntimeType, UnitLen},
EnvironmentRef, ExecState, Face, Geometry, GeometryWithImportedGeometry, Helix, ImportedGeometry, MetaSettings,
Metadata, Plane, Sketch, Solid, TagIdentifier,
},
parsing::ast::types::{
DefaultParamVal, FunctionExpression, KclNone, Literal, LiteralValue, Node, TagDeclarator, TagNode,
},
std::{StdFnProps, args::TyF64},
std::{args::TyF64, StdFnProps},
CompilationError, KclError, ModuleId, SourceRange,
};
pub type KclObjectFields = HashMap<String, KclValue>;
@ -136,9 +136,9 @@ impl JsonSchema for FunctionSource {
"FunctionSource".to_owned()
}
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
// TODO: Actually generate a reasonable schema.
r#gen.subschema_for::<()>()
gen.subschema_for::<()>()
}
}
@ -587,7 +587,7 @@ impl KclValue {
match self {
KclValue::TagIdentifier(t) => Ok(*t.clone()),
_ => Err(KclError::new_semantic(KclErrorDetails::new(
format!("Not a tag identifier: {self:?}"),
format!("Not a tag identifier: {:?}", self),
self.clone().into(),
))),
}
@ -598,7 +598,7 @@ impl KclValue {
match self {
KclValue::TagDeclarator(t) => Ok((**t).clone()),
_ => Err(KclError::new_semantic(KclErrorDetails::new(
format!("Not a tag declarator: {self:?}"),
format!("Not a tag declarator: {:?}", self),
self.clone().into(),
))),
}
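The `r#gen` versus `gen` difference in the `json_schema` hunk above appears to stem from `gen` being reserved as a keyword in the Rust 2024 edition, so newer code spells the parameter (and the `schemars` module path) as a raw identifier. A tiny illustration of raw identifiers, deliberately unrelated to schemars itself:

// `r#gen` is an ordinary identifier named `gen`, written in raw form so it
// stays legal even where `gen` is treated as a reserved keyword.
fn next_value(r#gen: &mut std::ops::Range<u32>) -> Option<u32> {
    r#gen.next()
}

fn main() {
    let mut r#gen = 0..3;
    assert_eq!(next_value(&mut r#gen), Some(0));
}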

View File

@ -207,8 +207,8 @@ use std::{
fmt,
pin::Pin,
sync::{
Arc,
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc,
},
};
@ -489,7 +489,7 @@ impl ProgramMemory {
}
Err(KclError::new_undefined_value(
KclErrorDetails::new(format!("`{var}` is not defined"), vec![]),
KclErrorDetails::new(format!("`{}` is not defined", var), vec![]),
Some(var.to_owned()),
))
}
@ -541,6 +541,22 @@ impl Stack {
self.push_new_env_for_call(snapshot);
}
/// Push a new stack frame on to the call stack for callees which should not read or write
/// from memory.
///
/// This is suitable for calling standard library functions or other functions written in Rust
/// which will use 'Rust memory' rather than KCL's memory and cannot reach into the wider
/// environment.
///
/// Trying to read or write from this environment will panic with an index out of bounds.
pub fn push_new_env_for_rust_call(&mut self) {
self.call_stack.push(self.current_env);
// Rust functions shouldn't try to set or access anything in their environment, so don't
// waste time and space on a new env. Using usize::MAX means we'll get an overflow if we
// try to access anything rather than a silent error.
self.current_env = EnvironmentRef(usize::MAX, 0);
}
/// Push a new stack frame on to the call stack with no connection to a parent environment.
///
/// Suitable for executing a separate module.
@ -631,7 +647,7 @@ impl Stack {
let env = self.memory.get_env(self.current_env.index());
if env.contains_key(&key) {
return Err(KclError::new_value_already_defined(KclErrorDetails::new(
format!("Cannot redefine `{key}`"),
format!("Cannot redefine `{}`", key),
vec![source_range],
)));
}
@ -667,7 +683,7 @@ impl Stack {
env.contains_key(var)
}
/// Get a key from the first stack frame on the call stack.
/// Get a key from the first KCL (i.e., non-Rust) stack frame on the call stack.
pub fn get_from_call_stack(&self, key: &str, source_range: SourceRange) -> Result<(usize, &KclValue), KclError> {
if !self.current_env.skip_env() {
return Ok((self.current_env.1, self.get(key, source_range)?));
@ -679,7 +695,7 @@ impl Stack {
}
}
unreachable!("No frames on the stack?");
unreachable!("It can't be Rust frames all the way down");
}
/// Iterate over all keys in the current environment which satisfy the provided predicate.
@ -1031,7 +1047,7 @@ mod env {
}
/// Take all bindings from the environment.
pub(super) fn take_bindings(self: Pin<&mut Self>) -> impl Iterator<Item = (String, (usize, KclValue))> + use<> {
pub(super) fn take_bindings(self: Pin<&mut Self>) -> impl Iterator<Item = (String, (usize, KclValue))> {
// SAFETY: caller must have unique access since self is mut. We're not moving or invalidating `self`.
let bindings = std::mem::take(unsafe { self.bindings.get().as_mut().unwrap() });
bindings.into_iter()
@ -1201,6 +1217,24 @@ mod test {
assert_get_from(mem, "c", 5, callee);
}
#[test]
fn rust_env() {
let mem = &mut Stack::new_for_tests();
mem.add("a".to_owned(), val(1), sr()).unwrap();
mem.add("b".to_owned(), val(3), sr()).unwrap();
let sn = mem.snapshot();
mem.push_new_env_for_rust_call();
mem.push_new_env_for_call(sn);
assert_get(mem, "b", 3);
mem.add("b".to_owned(), val(4), sr()).unwrap();
assert_get(mem, "b", 4);
mem.pop_env();
mem.pop_env();
assert_get(mem, "b", 3);
}
#[test]
fn deep_call_env() {
let mem = &mut Stack::new_for_tests();
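The new `push_new_env_for_rust_call` above relies on a sentinel environment index (`usize::MAX`) so that a Rust-implemented callee which wrongly touches KCL memory fails loudly. The toy stack below sketches that idea under heavy simplification: no snapshots or parent-environment lookups, and plain `Vec`s standing in for kcl-lib's real `ProgramMemory` and `EnvironmentRef`.

struct CallStack {
    envs: Vec<Vec<(String, i64)>>, // one binding table per environment
    current: usize,
    frames: Vec<usize>,
}

const SENTINEL: usize = usize::MAX;

impl CallStack {
    fn new() -> Self {
        Self { envs: vec![Vec::new()], current: 0, frames: Vec::new() }
    }

    // KCL call: allocate a real environment for the callee's bindings.
    fn push_env_for_call(&mut self) {
        self.frames.push(self.current);
        self.envs.push(Vec::new());
        self.current = self.envs.len() - 1;
    }

    // Rust call: no environment is allocated; any access through the sentinel
    // index panics (index out of bounds) instead of silently succeeding.
    fn push_env_for_rust_call(&mut self) {
        self.frames.push(self.current);
        self.current = SENTINEL;
    }

    fn add(&mut self, key: &str, value: i64) {
        self.envs[self.current].push((key.to_owned(), value));
    }

    fn pop_env(&mut self) {
        self.current = self.frames.pop().expect("no frames on the stack");
    }
}

fn main() {
    let mut stack = CallStack::new();
    stack.add("a", 1);
    stack.push_env_for_rust_call(); // entering a Rust builtin
    stack.push_env_for_call(); // the builtin calls back into a KCL function
    stack.add("b", 2);
    stack.pop_env();
    stack.pop_env();
    assert_eq!(stack.envs[0].len(), 1); // back at the root environment
}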

View File

@ -16,9 +16,10 @@ pub(crate) use import::PreImportedGeometry;
use indexmap::IndexMap;
pub use kcl_value::{KclObjectFields, KclValue};
use kcmc::{
ImageFormat, ModelingCmd, each_cmd as mcmd,
ok_response::{OkModelingCmdResponse, output::TakeSnapshot},
each_cmd as mcmd,
ok_response::{output::TakeSnapshot, OkModelingCmdResponse},
websocket::{ModelingSessionData, OkWebSocketResponseData},
ImageFormat, ModelingCmd,
};
use kittycad_modeling_cmds::{self as kcmc, id::ModelingCmdId};
pub use memory::EnvironmentRef;
@ -30,7 +31,6 @@ pub use state::{ExecState, MetaSettings};
use uuid::Uuid;
use crate::{
CompilationError, ExecError, KclErrorWithOutputs,
engine::{EngineManager, GridScaleBehavior},
errors::{KclError, KclErrorDetails},
execution::{
@ -43,6 +43,7 @@ use crate::{
modules::{ModuleId, ModulePath, ModuleRepr},
parsing::ast::types::{Expr, ImportPath, NodeRef},
source_range::SourceRange,
CompilationError, ExecError, KclErrorWithOutputs,
};
pub(crate) mod annotations;
@ -804,6 +805,43 @@ impl ExecutorContext {
Ok(outcome)
}
pub async fn run_additional(&self, program: crate::Program) -> Result<ExecOutcome, KclErrorWithOutputs> {
assert!(!self.is_mock());
let (program, exec_state, result) = match cache::read_old_ast().await {
Some(cached_state) => {
let mut exec_state = cached_state.reconstitute_exec_state();
exec_state.mut_stack().restore_env(cached_state.main.result_env);
let result = self.run_concurrent(&program, &mut exec_state, None, true).await;
(program, exec_state, result)
}
None => {
let mut exec_state = ExecState::new(self);
let result = self.run_concurrent(&program, &mut exec_state, None, false).await;
(program, exec_state, result)
}
};
// Throw the error.
let result = result?;
// Save this as the last successful execution to the cache.
cache::write_old_ast(GlobalState::new(
exec_state.clone(),
self.settings.clone(),
program.ast,
result.0,
))
.await;
let outcome = exec_state.into_exec_outcome(result.0, self).await;
Ok(outcome)
}
/// Perform the execution of a program.
///
/// To access non-fatal errors and warnings, extract them from the `ExecState`.
@ -1328,7 +1366,7 @@ impl ExecutorContext {
created: if deterministic_time {
Some("2021-01-01T00:00:00Z".parse().map_err(|e| {
KclError::new_internal(crate::errors::KclErrorDetails::new(
format!("Failed to parse date: {e}"),
format!("Failed to parse date: {}", e),
vec![SourceRange::default()],
))
})?)
@ -1408,7 +1446,7 @@ pub(crate) async fn parse_execute_with_project_dir(
engine: Arc::new(Box::new(
crate::engine::conn_mock::EngineConnection::new().await.map_err(|err| {
KclError::new_internal(crate::errors::KclErrorDetails::new(
format!("Failed to create mock engine connection: {err}"),
format!("Failed to create mock engine connection: {}", err),
vec![SourceRange::default()],
))
})?,
@ -1445,7 +1483,7 @@ mod tests {
use pretty_assertions::assert_eq;
use super::*;
use crate::{ModuleId, errors::KclErrorDetails, execution::memory::Stack};
use crate::{errors::KclErrorDetails, execution::memory::Stack, ModuleId};
/// Convenience function to get a JSON value from memory and unwrap.
#[track_caller]
@ -1920,22 +1958,6 @@ shape = layer() |> patternTransform(instances = 10, transform = transform)
);
}
#[tokio::test(flavor = "multi_thread")]
async fn pass_std_to_std() {
let ast = r#"sketch001 = startSketchOn(XY)
profile001 = circle(sketch001, center = [0, 0], radius = 2)
extrude001 = extrude(profile001, length = 5)
extrudes = patternLinear3d(
extrude001,
instances = 3,
distance = 5,
axis = [1, 1, 0],
)
clone001 = map(extrudes, f = clone)
"#;
parse_execute(ast).await.unwrap();
}
#[tokio::test(flavor = "multi_thread")]
async fn test_zero_param_fn() {
let ast = r#"sigmaAllow = 35000 // psi
@ -2060,7 +2082,8 @@ notFunction = !x";
fn_err
.message()
.starts_with("Cannot apply unary operator ! to non-boolean value: "),
"Actual error: {fn_err:?}"
"Actual error: {:?}",
fn_err
);
let code8 = "
@ -2073,7 +2096,8 @@ notTagDeclarator = !myTagDeclarator";
tag_declarator_err
.message()
.starts_with("Cannot apply unary operator ! to non-boolean value: a tag declarator"),
"Actual error: {tag_declarator_err:?}"
"Actual error: {:?}",
tag_declarator_err
);
let code9 = "
@ -2086,7 +2110,8 @@ notTagIdentifier = !myTag";
tag_identifier_err
.message()
.starts_with("Cannot apply unary operator ! to non-boolean value: a tag identifier"),
"Actual error: {tag_identifier_err:?}"
"Actual error: {:?}",
tag_identifier_err
);
let code10 = "notPipe = !(1 |> 2)";
@ -2238,7 +2263,7 @@ w = f() + f()
if let Err(err) = ctx.run_with_caching(old_program).await {
let report = err.into_miette_report_with_outputs(code).unwrap();
let report = miette::Report::new(report);
panic!("Error executing program: {report:?}");
panic!("Error executing program: {:?}", report);
}
// Get the id_generator from the first execution.

View File

@ -8,10 +8,10 @@ use uuid::Uuid;
#[cfg(feature = "artifact-graph")]
use crate::exec::ArtifactCommand;
use crate::{
ExecState, ExecutorContext, KclError, SourceRange,
exec::{IdGenerator, KclValue},
execution::Solid,
std::Args,
ExecState, ExecutorContext, KclError, SourceRange,
};
/// Context and metadata needed to send a single modeling command.

View File

@ -9,19 +9,20 @@ use uuid::Uuid;
#[cfg(feature = "artifact-graph")]
use crate::execution::{Artifact, ArtifactCommand, ArtifactGraph, ArtifactId};
use crate::{
CompilationError, EngineManager, ExecutorContext, KclErrorWithOutputs,
errors::{KclError, KclErrorDetails, Severity},
exec::DefaultPlanes,
execution::{
EnvironmentRef, ExecOutcome, ExecutorSettings, KclValue, UnitAngle, UnitLen, annotations,
annotations,
cad_op::Operation,
id_generator::IdGenerator,
memory::{ProgramMemory, Stack},
types::{self, NumericType},
EnvironmentRef, ExecOutcome, ExecutorSettings, KclValue, UnitAngle, UnitLen,
},
modules::{ModuleId, ModuleInfo, ModuleLoader, ModulePath, ModuleRepr, ModuleSource},
parsing::ast::types::{Annotation, NodeRef},
source_range::SourceRange,
CompilationError, EngineManager, ExecutorContext, KclErrorWithOutputs,
};
/// State for executing a program.
@ -554,7 +555,7 @@ impl MetaSettings {
annotations::SETTINGS_UNIT_ANGLE
),
vec![annotation.as_source_range()],
)));
)))
}
}
}

View File

@ -220,9 +220,9 @@ impl schemars::JsonSchema for TypedPath {
"TypedPath".to_owned()
}
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
// TODO: Actually generate a reasonable schema.
r#gen.subschema_for::<std::path::PathBuf>()
gen.subschema_for::<std::path::PathBuf>()
}
}

View File

@ -5,17 +5,17 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{
CompilationError, SourceRange,
execution::{
ExecState, Plane, PlaneInfo, Point3d,
kcl_value::{KclValue, TypeDef},
memory::{self},
ExecState, Plane, PlaneInfo, Point3d,
},
parsing::{
ast::types::{PrimitiveType as AstPrimitiveType, Type},
token::NumericSuffix,
},
std::args::{FromKclValue, TyF64},
CompilationError, SourceRange,
};
#[derive(Debug, Clone, PartialEq)]
@ -210,7 +210,7 @@ impl RuntimeType {
let ty_val = exec_state
.stack()
.get(&format!("{}{}", memory::TYPE_PREFIX, alias), source_range)
.map_err(|_| CompilationError::err(source_range, format!("Unknown type: {alias}")))?;
.map_err(|_| CompilationError::err(source_range, format!("Unknown type: {}", alias)))?;
Ok(match ty_val {
KclValue::Type { value, .. } => match value {
@ -241,7 +241,7 @@ impl RuntimeType {
"a tuple with values of types ({})",
tys.iter().map(Self::human_friendly_type).collect::<Vec<_>>().join(", ")
),
RuntimeType::Object(_) => format!("an object with fields {self}"),
RuntimeType::Object(_) => format!("an object with fields {}", self),
}
}
@ -840,18 +840,6 @@ pub enum UnitType {
Angle(UnitAngle),
}
impl UnitType {
pub(crate) fn to_suffix(self) -> Option<String> {
match self {
UnitType::Count => Some("_".to_owned()),
UnitType::Length(UnitLen::Unknown) => None,
UnitType::Angle(UnitAngle::Unknown) => None,
UnitType::Length(l) => Some(l.to_string()),
UnitType::Angle(a) => Some(a.to_string()),
}
}
}
impl std::fmt::Display for UnitType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@ -1541,7 +1529,7 @@ impl KclValue {
#[cfg(test)]
mod test {
use super::*;
use crate::execution::{ExecTestResults, parse_execute};
use crate::execution::{parse_execute, ExecTestResults};
fn values(exec_state: &mut ExecState) -> Vec<KclValue> {
vec![
@ -1987,16 +1975,14 @@ mod test {
])
)
);
assert!(
RuntimeType::Union(vec![
assert!(RuntimeType::Union(vec![
RuntimeType::Primitive(PrimitiveType::Number(NumericType::Any)),
RuntimeType::Primitive(PrimitiveType::Boolean)
])
.subtype(&RuntimeType::Union(vec![
RuntimeType::Primitive(PrimitiveType::Number(NumericType::Any)),
RuntimeType::Primitive(PrimitiveType::Boolean)
]))
);
])));
// Covariance
let count = KclValue::Number {

View File

@ -45,31 +45,6 @@ pub fn format_number_literal(value: f64, suffix: NumericSuffix) -> Result<String
}
}
#[derive(Debug, Clone, PartialEq, Serialize, thiserror::Error)]
#[serde(tag = "type")]
pub enum FormatNumericTypeError {
#[error("Invalid numeric type: {0:?}")]
Invalid(NumericType),
}
/// For UI code generation, format a number value with a suffix such that the
/// result can parse as a literal. If it can't be done, returns an error.
///
/// This is used by TS.
pub fn format_number_value(value: f64, ty: NumericType) -> Result<String, FormatNumericTypeError> {
match ty {
NumericType::Default { .. } => Ok(value.to_string()),
// There isn't a syntactic suffix for these. For unknown, we don't want
// to ever generate the unknown suffix. We currently warn on it, and we
// may remove it in the future.
NumericType::Unknown | NumericType::Any => Err(FormatNumericTypeError::Invalid(ty)),
NumericType::Known(unit_type) => unit_type
.to_suffix()
.map(|suffix| format!("{value}{suffix}"))
.ok_or(FormatNumericTypeError::Invalid(ty)),
}
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
@ -159,74 +134,4 @@ mod tests {
Err(FormatNumericSuffixError::Invalid(NumericSuffix::Unknown))
);
}
#[test]
fn test_format_number_value() {
assert_eq!(
format_number_value(
1.0,
NumericType::Default {
len: Default::default(),
angle: Default::default()
}
),
Ok("1".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Unknown))),
Err(FormatNumericTypeError::Invalid(NumericType::Known(UnitType::Length(
UnitLen::Unknown
))))
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Angle(UnitAngle::Unknown))),
Err(FormatNumericTypeError::Invalid(NumericType::Known(UnitType::Angle(
UnitAngle::Unknown
))))
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Count)),
Ok("1_".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Mm))),
Ok("1mm".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Cm))),
Ok("1cm".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::M))),
Ok("1m".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Inches))),
Ok("1in".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Feet))),
Ok("1ft".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Yards))),
Ok("1yd".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Angle(UnitAngle::Degrees))),
Ok("1deg".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Known(UnitType::Angle(UnitAngle::Radians))),
Ok("1rad".to_owned())
);
assert_eq!(
format_number_value(1.0, NumericType::Unknown),
Err(FormatNumericTypeError::Invalid(NumericType::Unknown))
);
assert_eq!(
format_number_value(1.0, NumericType::Any),
Err(FormatNumericTypeError::Invalid(NumericType::Any))
);
}
}

View File

@ -3,10 +3,10 @@
use anyhow::Result;
use crate::{
SourceRange,
errors::{KclError, KclErrorDetails},
execution::typed_path::TypedPath,
fs::FileSystem,
SourceRange,
};
#[derive(Debug, Clone)]

View File

@ -2,7 +2,7 @@
use anyhow::Result;
use crate::{SourceRange, execution::typed_path::TypedPath};
use crate::{execution::typed_path::TypedPath, SourceRange};
#[cfg(not(target_arch = "wasm32"))]
pub mod local;

View File

@ -4,11 +4,11 @@ use anyhow::Result;
use wasm_bindgen::prelude::wasm_bindgen;
use crate::{
SourceRange,
errors::{KclError, KclErrorDetails},
execution::typed_path::TypedPath,
fs::FileSystem,
wasm::JsFuture,
SourceRange,
};
#[wasm_bindgen(module = "/../../src/lang/std/fileSystemManager.ts")]

View File

@ -90,9 +90,10 @@ pub use errors::{
ReportWithOutputs,
};
pub use execution::{
ExecOutcome, ExecState, ExecutorContext, ExecutorSettings, MetaSettings, Point2d, bust_cache, clear_mem_cache,
bust_cache, clear_mem_cache,
typed_path::TypedPath,
types::{UnitAngle, UnitLen},
ExecOutcome, ExecState, ExecutorContext, ExecutorSettings, MetaSettings, Point2d,
};
pub use lsp::{
copilot::Backend as CopilotLspBackend,
@ -100,7 +101,7 @@ pub use lsp::{
};
pub use modules::ModuleId;
pub use parsing::ast::types::{FormatOptions, NodePath, Step as NodePathStep};
pub use settings::types::{Configuration, UnitLength, project::ProjectConfiguration};
pub use settings::types::{project::ProjectConfiguration, Configuration, UnitLength};
pub use source_range::SourceRange;
#[cfg(not(target_arch = "wasm32"))]
pub use unparser::{recast_dir, walk_dir};
@ -108,12 +109,12 @@ pub use unparser::{recast_dir, walk_dir};
// Rather than make executor public and make lots of it pub(crate), just re-export into a new module.
// Ideally we wouldn't export these things at all, they should only be used for testing.
pub mod exec {
pub use crate::execution::{
types::{NumericType, UnitAngle, UnitLen, UnitType},
DefaultPlanes, IdGenerator, KclValue, PlaneType, Sketch,
};
#[cfg(feature = "artifact-graph")]
pub use crate::execution::{ArtifactCommand, Operation};
pub use crate::execution::{
DefaultPlanes, IdGenerator, KclValue, PlaneType, Sketch,
types::{NumericType, UnitAngle, UnitLen, UnitType},
};
}
#[cfg(target_arch = "wasm32")]
@ -135,12 +136,12 @@ pub mod native_engine {
}
pub mod std_utils {
pub use crate::std::utils::{TangentialArcInfoInput, get_tangential_arc_to_info, is_points_ccw_wasm};
pub use crate::std::utils::{get_tangential_arc_to_info, is_points_ccw_wasm, TangentialArcInfoInput};
}
pub mod pretty {
pub use crate::{
fmt::{format_number_literal, format_number_value, human_display_number},
fmt::{format_number_literal, human_display_number},
parsing::token::NumericSuffix,
};
}
@ -159,7 +160,7 @@ lazy_static::lazy_static! {
#[cfg(feature = "cli")]
let named_extensions = kittycad::types::FileImportFormat::value_variants()
.iter()
.map(|x| format!("{x}"))
.map(|x| format!("{}", x))
.collect::<Vec<String>>();
#[cfg(not(feature = "cli"))]
let named_extensions = vec![]; // We don't really need this outside of the CLI.
@ -275,25 +276,41 @@ impl Program {
#[inline]
fn try_f64_to_usize(f: f64) -> Option<usize> {
let i = f as usize;
if i as f64 == f { Some(i) } else { None }
if i as f64 == f {
Some(i)
} else {
None
}
}
#[inline]
fn try_f64_to_u32(f: f64) -> Option<u32> {
let i = f as u32;
if i as f64 == f { Some(i) } else { None }
if i as f64 == f {
Some(i)
} else {
None
}
}
#[inline]
fn try_f64_to_u64(f: f64) -> Option<u64> {
let i = f as u64;
if i as f64 == f { Some(i) } else { None }
if i as f64 == f {
Some(i)
} else {
None
}
}
#[inline]
fn try_f64_to_i64(f: f64) -> Option<i64> {
let i = f as i64;
if i as f64 == f { Some(i) } else { None }
if i as f64 == f {
Some(i)
} else {
None
}
}
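
All four helpers above rely on the same exact-round-trip check; a standalone sketch of that pattern (illustrative only, not part of this diff):

fn try_f64_to_usize(f: f64) -> Option<usize> {
    // Cast down, cast back, and only accept the value if nothing was lost.
    let i = f as usize;
    if i as f64 == f { Some(i) } else { None }
}

fn main() {
    assert_eq!(try_f64_to_usize(2.0), Some(2)); // exact integer survives
    assert_eq!(try_f64_to_usize(2.5), None);    // fractional part is lost in the cast
    assert_eq!(try_f64_to_usize(-1.0), None);   // negative saturates to 0, so it fails the check
}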
/// Get the version of the KCL library.

View File

@ -2,19 +2,19 @@ use anyhow::Result;
use convert_case::Casing;
use crate::{
SourceRange,
errors::Suggestion,
lint::rule::{Discovered, Finding, def_finding},
lint::rule::{def_finding, Discovered, Finding},
parsing::ast::types::{Node as AstNode, ObjectProperty, Program, VariableDeclarator},
walk::Node,
SourceRange,
};
def_finding!(
Z0001,
"Identifiers should be lowerCamelCase",
"Identifiers must be lowerCamelCase",
"\
By convention, variable names are lowerCamelCase, not snake_case, kebab-case,
nor upper CamelCase (aka PascalCase). 🐪
nor CammelCase. 🐪
For instance, a good identifier for the variable representing 'box height'
would be 'boxHeight', not 'BOX_HEIGHT', 'box_height' nor 'BoxHeight'. For
@ -38,12 +38,12 @@ fn lint_lower_camel_case_var(decl: &VariableDeclarator, prog: &AstNode<Program>)
let recast = prog.recast(&Default::default(), 0);
let suggestion = Suggestion {
title: format!("rename '{name}' to '{new_name}'"),
title: format!("rename '{}' to '{}'", name, new_name),
insert: recast,
source_range: prog.as_source_range(),
};
findings.push(Z0001.at(
format!("found '{name}'"),
format!("found '{}'", name),
SourceRange::new(ident.start, ident.end, ident.module_id),
Some(suggestion.clone()),
));
@ -61,7 +61,7 @@ fn lint_lower_camel_case_property(decl: &ObjectProperty, _prog: &AstNode<Program
if !name.is_case(convert_case::Case::Camel) {
// We can't rename the properties yet.
findings.push(Z0001.at(
format!("found '{name}'"),
format!("found '{}'", name),
SourceRange::new(ident.start, ident.end, ident.module_id),
None,
));
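
Both functions above delegate the casing decision to convert_case's Casing trait; a self-contained sketch of that check (the rename at the end is illustrative, since the lint's actual new_name computation is outside this hunk):

use convert_case::{Case, Casing};

fn main() {
    // Z0001 flags identifiers that are not lowerCamelCase.
    assert!("boxHeight".is_case(Case::Camel));
    assert!(!"box_height".is_case(Case::Camel));
    assert!(!"BoxHeight".is_case(Case::Camel));
    // A rename in the style the lint suggests for variables:
    assert_eq!("box_height".to_case(Case::Camel), "boxHeight");
}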
@ -93,7 +93,7 @@ pub fn lint_object_properties(decl: Node, prog: &AstNode<Program>) -> Result<Vec
#[cfg(test)]
mod tests {
use super::{Z0001, lint_object_properties, lint_variables};
use super::{lint_object_properties, lint_variables, Z0001};
use crate::lint::rule::{assert_finding, test_finding, test_no_finding};
#[tokio::test]

View File

@ -4,7 +4,7 @@ use crate::{
errors::Suggestion,
lint::{
checks::offset_plane::start_sketch_on_check_specific_plane,
rule::{Discovered, Finding, def_finding},
rule::{def_finding, Discovered, Finding},
},
parsing::ast::types::{Node as AstNode, Program},
walk::Node,
@ -33,11 +33,14 @@ pub fn lint_should_be_default_plane(node: Node, _prog: &AstNode<Program>) -> Res
}
let suggestion = Suggestion {
title: "use defaultPlane instead".to_owned(),
insert: format!("{plane_name}"),
insert: format!("{}", plane_name),
source_range: call_source_range,
};
Ok(vec![Z0002.at(
format!("custom plane in startSketchOn; defaultPlane {plane_name} would work here"),
format!(
"custom plane in startSketchOn; defaultPlane {} would work here",
plane_name
),
call_source_range,
Some(suggestion),
)])
@ -45,7 +48,7 @@ pub fn lint_should_be_default_plane(node: Node, _prog: &AstNode<Program>) -> Res
#[cfg(test)]
mod tests {
use super::{Z0002, lint_should_be_default_plane};
use super::{lint_should_be_default_plane, Z0002};
use crate::lint::rule::{test_finding, test_no_finding};
test_finding!(

View File

@ -2,6 +2,6 @@ mod camel_case;
mod default_plane;
mod offset_plane;
pub use camel_case::{Z0001, lint_object_properties, lint_variables};
pub use default_plane::{Z0002, lint_should_be_default_plane};
pub use offset_plane::{Z0003, lint_should_be_offset_plane};
pub use camel_case::{lint_object_properties, lint_variables, Z0001};
pub use default_plane::{lint_should_be_default_plane, Z0002};
pub use offset_plane::{lint_should_be_offset_plane, Z0003};

View File

@ -1,15 +1,15 @@
use anyhow::Result;
use crate::{
SourceRange,
engine::{DEFAULT_PLANE_INFO, PlaneName},
engine::{PlaneName, DEFAULT_PLANE_INFO},
errors::Suggestion,
execution::{PlaneInfo, Point3d, types::UnitLen},
lint::rule::{Discovered, Finding, def_finding},
execution::{types::UnitLen, PlaneInfo, Point3d},
lint::rule::{def_finding, Discovered, Finding},
parsing::ast::types::{
BinaryPart, CallExpressionKw, Expr, LiteralValue, Node as AstNode, ObjectExpression, Program, UnaryOperator,
},
walk::Node,
SourceRange,
};
def_finding!(
@ -39,11 +39,14 @@ pub fn lint_should_be_offset_plane(node: Node, _prog: &AstNode<Program>) -> Resu
}
let suggestion = Suggestion {
title: "use offsetPlane instead".to_owned(),
insert: format!("offsetPlane({plane_name}, offset = {offset})"),
insert: format!("offsetPlane({}, offset = {})", plane_name, offset),
source_range: call_source_range,
};
Ok(vec![Z0003.at(
format!("custom plane in startSketchOn; offsetPlane from {plane_name} would work here"),
format!(
"custom plane in startSketchOn; offsetPlane from {} would work here",
plane_name
),
call_source_range,
Some(suggestion),
)])
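
For reference, the replacement text built above renders like this (a standalone sketch with made-up values; the lint derives the real plane name and offset from the startSketchOn call):

fn main() {
    let plane_name = "XY"; // hypothetical
    let offset = 10.0;     // hypothetical
    let insert = format!("offsetPlane({plane_name}, offset = {offset})");
    assert_eq!(insert, "offsetPlane(XY, offset = 10)");
}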
@ -65,16 +68,16 @@ fn get_xyz(point: &ObjectExpression) -> Option<(f64, f64, f64)> {
for property in &point.properties {
let Some(value) = (match &property.value {
Expr::UnaryExpression(value) => {
Expr::UnaryExpression(ref value) => {
if value.operator != UnaryOperator::Neg {
continue;
}
let BinaryPart::Literal(value) = &value.inner.argument else {
let BinaryPart::Literal(ref value) = &value.inner.argument else {
continue;
};
unlitafy(&value.inner.value).map(|v| -v)
}
Expr::Literal(value) => unlitafy(&value.value),
Expr::Literal(ref value) => unlitafy(&value.value),
_ => {
continue;
}
@ -268,7 +271,7 @@ fn normalize_plane_info(plane_info: &PlaneInfo) -> PlaneInfo {
#[cfg(test)]
mod tests {
use super::{Z0003, lint_should_be_offset_plane};
use super::{lint_should_be_offset_plane, Z0003};
use crate::lint::rule::{test_finding, test_no_finding};
test_finding!(

View File

@ -4,11 +4,11 @@ use serde::Serialize;
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
use crate::{
SourceRange,
errors::Suggestion,
lsp::IntoDiagnostic,
parsing::ast::types::{Node as AstNode, Program},
walk::Node,
SourceRange,
};
/// Check the provided AST for any found rule violations.
@ -180,7 +180,7 @@ impl Finding {
}
macro_rules! def_finding {
( $code:ident, $title:expr_2021, $description:expr_2021 ) => {
( $code:ident, $title:expr, $description:expr ) => {
/// Generated Finding
pub const $code: Finding = $crate::lint::rule::finding!($code, $title, $description);
};
@ -188,7 +188,7 @@ macro_rules! def_finding {
pub(crate) use def_finding;
macro_rules! finding {
( $code:ident, $title:expr_2021, $description:expr_2021 ) => {
( $code:ident, $title:expr, $description:expr ) => {
$crate::lint::rule::Finding {
code: stringify!($code),
title: $title,
@ -205,7 +205,7 @@ pub(crate) use test::{assert_finding, assert_no_finding, test_finding, test_no_f
mod test {
macro_rules! assert_no_finding {
( $check:expr_2021, $finding:expr_2021, $kcl:expr_2021 ) => {
( $check:expr, $finding:expr, $kcl:expr ) => {
let prog = $crate::Program::parse_no_errs($kcl).unwrap();
// Ensure the code still works.
@ -220,7 +220,7 @@ mod test {
}
macro_rules! assert_finding {
( $check:expr_2021, $finding:expr_2021, $kcl:expr_2021, $output:expr_2021, $suggestion:expr_2021 ) => {
( $check:expr, $finding:expr, $kcl:expr, $output:expr, $suggestion:expr ) => {
let prog = $crate::Program::parse_no_errs($kcl).unwrap();
// Ensure the code still works.
@ -250,7 +250,7 @@ mod test {
}
macro_rules! test_finding {
( $name:ident, $check:expr_2021, $finding:expr_2021, $kcl:expr_2021, $output:expr_2021, $suggestion:expr_2021 ) => {
( $name:ident, $check:expr, $finding:expr, $kcl:expr, $output:expr, $suggestion:expr ) => {
#[tokio::test]
async fn $name() {
$crate::lint::rule::assert_finding!($check, $finding, $kcl, $output, $suggestion);
@ -259,7 +259,7 @@ mod test {
}
macro_rules! test_no_finding {
( $name:ident, $check:expr_2021, $finding:expr_2021, $kcl:expr_2021 ) => {
( $name:ident, $check:expr, $finding:expr, $kcl:expr ) => {
#[tokio::test]
async fn $name() {
$crate::lint::rule::assert_no_finding!($check, $finding, $kcl);

View File

@ -90,7 +90,7 @@ where
async fn do_initialized(&self, params: InitializedParams) {
self.client()
.log_message(MessageType::INFO, format!("initialized: {params:?}"))
.log_message(MessageType::INFO, format!("initialized: {:?}", params))
.await;
self.set_is_initialized(true).await;
@ -139,7 +139,7 @@ where
self.client()
.log_message(
MessageType::WARNING,
format!("updating from disk `{project_dir}` failed: {err:?}"),
format!("updating from disk `{}` failed: {:?}", project_dir, err),
)
.await;
}
@ -148,19 +148,19 @@ where
async fn do_did_change_configuration(&self, params: DidChangeConfigurationParams) {
self.client()
.log_message(MessageType::INFO, format!("configuration changed: {params:?}"))
.log_message(MessageType::INFO, format!("configuration changed: {:?}", params))
.await;
}
async fn do_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
self.client()
.log_message(MessageType::INFO, format!("watched files changed: {params:?}"))
.log_message(MessageType::INFO, format!("watched files changed: {:?}", params))
.await;
}
async fn do_did_create_files(&self, params: CreateFilesParams) {
self.client()
.log_message(MessageType::INFO, format!("files created: {params:?}"))
.log_message(MessageType::INFO, format!("files created: {:?}", params))
.await;
// Create each file in the code map.
for file in params.files {
@ -170,7 +170,7 @@ where
async fn do_did_rename_files(&self, params: RenameFilesParams) {
self.client()
.log_message(MessageType::INFO, format!("files renamed: {params:?}"))
.log_message(MessageType::INFO, format!("files renamed: {:?}", params))
.await;
// Rename each file in the code map.
for file in params.files {
@ -186,7 +186,7 @@ where
async fn do_did_delete_files(&self, params: DeleteFilesParams) {
self.client()
.log_message(MessageType::INFO, format!("files deleted: {params:?}"))
.log_message(MessageType::INFO, format!("files deleted: {:?}", params))
.await;
// Delete each file in the map.
for file in params.files {
@ -228,7 +228,7 @@ where
async fn do_did_close(&self, params: DidCloseTextDocumentParams) {
self.client()
.log_message(MessageType::INFO, format!("document closed: {params:?}"))
.log_message(MessageType::INFO, format!("document closed: {:?}", params))
.await;
}
}
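
Most pairs in this file differ only in format! argument style; the inline-capture and positional forms expand to the same output, as this standalone sketch shows:

fn main() {
    let params = vec!["file:///test.kcl"]; // stand-in for the LSP params value
    let a = format!("files created: {params:?}");
    let b = format!("files created: {:?}", params);
    assert_eq!(a, b);
}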

View File

@ -13,7 +13,6 @@ use std::{
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use tower_lsp::{
LanguageServer,
jsonrpc::{Error, Result},
lsp_types::{
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
@ -23,6 +22,7 @@ use tower_lsp::{
TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
WorkspaceServerCapabilities,
},
LanguageServer,
};
use crate::lsp::{
@ -198,7 +198,7 @@ impl Backend {
.map_err(|err| Error {
code: tower_lsp::jsonrpc::ErrorCode::from(69),
data: None,
message: Cow::from(format!("Failed to get completions from zoo api: {err}")),
message: Cow::from(format!("Failed to get completions from zoo api: {}", err)),
})?;
Ok(resp.completions)
}
@ -209,7 +209,7 @@ impl Backend {
let mut lock = copy.write().map_err(|err| Error {
code: tower_lsp::jsonrpc::ErrorCode::from(69),
data: None,
message: Cow::from(format!("Failed lock: {err}")),
message: Cow::from(format!("Failed lock: {}", err)),
})?;
*lock = params;
Ok(Success::new(true))
@ -254,7 +254,7 @@ impl Backend {
.map_err(|err| Error {
code: tower_lsp::jsonrpc::ErrorCode::from(69),
data: None,
message: Cow::from(format!("Failed to get completions: {err}")),
message: Cow::from(format!("Failed to get completions: {}", err)),
})?;
#[cfg(not(test))]
let mut completion_list = vec![];
@ -294,7 +294,7 @@ part001 = cube(pos = [0,0], scale = 20)
pub async fn accept_completion(&self, params: CopilotAcceptCompletionParams) {
self.client
.log_message(MessageType::INFO, format!("Accepted completions: {params:?}"))
.log_message(MessageType::INFO, format!("Accepted completions: {:?}", params))
.await;
// Get the original telemetry data.
@ -303,7 +303,7 @@ part001 = cube(pos = [0,0], scale = 20)
};
self.client
.log_message(MessageType::INFO, format!("Original telemetry: {original:?}"))
.log_message(MessageType::INFO, format!("Original telemetry: {:?}", original))
.await;
// TODO: Send the telemetry data to the zoo api.
@ -311,7 +311,7 @@ part001 = cube(pos = [0,0], scale = 20)
pub async fn reject_completions(&self, params: CopilotRejectCompletionParams) {
self.client
.log_message(MessageType::INFO, format!("Rejected completions: {params:?}"))
.log_message(MessageType::INFO, format!("Rejected completions: {:?}", params))
.await;
// Get the original telemetry data.
@ -323,7 +323,7 @@ part001 = cube(pos = [0,0], scale = 20)
}
self.client
.log_message(MessageType::INFO, format!("Original telemetry: {originals:?}"))
.log_message(MessageType::INFO, format!("Original telemetry: {:?}", originals))
.await;
// TODO: Send the telemetry data to the zoo api.

View File

@ -85,7 +85,7 @@ impl CopilotCompletionResponse {
impl CopilotCyclingCompletion {
pub fn new(text: String, line_before: String, position: CopilotPosition) -> Self {
let display_text = text.clone();
let text = format!("{line_before}{text}");
let text = format!("{}{}", line_before, text);
let end_char = text.find('\n').unwrap_or(text.len()) as u32;
Self {
uuid: uuid::Uuid::new_v4(),

View File

@ -3,7 +3,7 @@ use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::Range as LspRange;
use crate::{SourceRange, parsing::ast::types::*};
use crate::{parsing::ast::types::*, SourceRange};
/// Describes information about a hover.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]

View File

@ -15,7 +15,6 @@ use dashmap::DashMap;
use sha2::Digest;
use tokio::sync::RwLock;
use tower_lsp::{
Client, LanguageServer,
jsonrpc::Result as RpcResult,
lsp_types::{
CodeAction, CodeActionKind, CodeActionOptions, CodeActionOrCommand, CodeActionParams,
@ -38,10 +37,10 @@ use tower_lsp::{
TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, TextEdit, WorkDoneProgressOptions,
WorkspaceEdit, WorkspaceFolder, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
},
Client, LanguageServer,
};
use crate::{
ModuleId, Program, SourceRange,
docs::kcl_doc::ModData,
errors::LspSuggestion,
exec::KclValue,
@ -52,10 +51,11 @@ use crate::{
util::IntoDiagnostic,
},
parsing::{
PIPE_OPERATOR,
ast::types::{Expr, VariableKind},
token::TokenStream,
PIPE_OPERATOR,
},
ModuleId, Program, SourceRange,
};
pub mod custom_notifications;
@ -290,9 +290,10 @@ impl crate::lsp::backend::Backend for Backend {
};
// Get the previous tokens.
let tokens_changed = match self.token_map.get(&filename) {
Some(previous_tokens) => *previous_tokens != tokens,
_ => true,
let tokens_changed = if let Some(previous_tokens) = self.token_map.get(&filename) {
*previous_tokens != tokens
} else {
true
};
let had_diagnostics = self.has_diagnostics(params.uri.as_ref()).await;
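
The tokens_changed pair above is another purely mechanical rewrite: matching on the Option and using if-let with an else arm are equivalent, as in this standalone sketch (hypothetical map and key):

use std::collections::HashMap;

fn main() {
    let token_map: HashMap<&str, Vec<u32>> = HashMap::new();
    let tokens = vec![1, 2, 3];

    let changed_match = match token_map.get("main.kcl") {
        Some(previous) => *previous != tokens,
        _ => true,
    };
    let changed_if_let = if let Some(previous) = token_map.get("main.kcl") {
        *previous != tokens
    } else {
        true
    };
    assert_eq!(changed_match, changed_if_let);
}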
@ -423,7 +424,7 @@ impl Backend {
self.client
.log_message(
MessageType::ERROR,
format!("token type `{token_type:?}` not accounted for"),
format!("token type `{:?}` not accounted for", token_type),
)
.await;
continue;
@ -435,8 +436,7 @@ impl Backend {
// Calculate the token modifiers.
// Get the value at the current position.
let token_modifiers_bitset = match self.ast_map.get(params.uri.as_str()) {
Some(ast) => {
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(params.uri.as_str()) {
let token_index = Arc::new(Mutex::new(token_type_index));
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
crate::walk::walk(&ast.ast, |node: crate::walk::Node| {
@ -538,18 +538,17 @@ impl Backend {
})
.unwrap_or_default();
let t = match token_index.lock() {
Ok(guard) => *guard,
_ => 0,
};
let t = if let Ok(guard) = token_index.lock() { *guard } else { 0 };
token_type_index = t;
match modifier_index.lock() {
Ok(guard) => *guard,
_ => 0,
}
}
_ => 0,
let m = if let Ok(guard) = modifier_index.lock() {
*guard
} else {
0
};
m
} else {
0
};
// We need to check if we are on the last token of the line.
@ -653,14 +652,11 @@ impl Backend {
.await;
}
let mut items = match self.diagnostics_map.get(params.uri.as_str()) {
Some(items) => {
let mut items = if let Some(items) = self.diagnostics_map.get(params.uri.as_str()) {
// TODO: Would be awesome to fix the clone here.
items.clone()
}
_ => {
} else {
vec![]
}
};
for diagnostic in diagnostics {
@ -772,7 +768,7 @@ impl Backend {
// Read hash digest and consume hasher
let result = hasher.finalize();
// Get the hash as a string.
let user_id_hash = format!("{result:x}");
let user_id_hash = format!("{:x}", result);
// Get the workspace folders.
// The key of the workspace folder is the project name.
@ -870,7 +866,7 @@ impl Backend {
impl LanguageServer for Backend {
async fn initialize(&self, params: InitializeParams) -> RpcResult<InitializeResult> {
self.client
.log_message(MessageType::INFO, format!("initialize: {params:?}"))
.log_message(MessageType::INFO, format!("initialize: {:?}", params))
.await;
Ok(InitializeResult {
@ -1010,7 +1006,7 @@ impl LanguageServer for Backend {
#[cfg(not(target_arch = "wasm32"))]
if let Err(err) = self.send_telemetry().await {
self.client
.log_message(MessageType::WARNING, format!("failed to send telemetry: {err}"))
.log_message(MessageType::WARNING, format!("failed to send telemetry: {}", err))
.await;
}
}
@ -1094,7 +1090,7 @@ impl LanguageServer for Backend {
Ok(Some(LspHover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
value: format!("```\n{name}{sig}\n```\n\n{docs}"),
value: format!("```\n{}{}\n```\n\n{}", name, sig, docs),
}),
range: Some(range),
}))
@ -1122,7 +1118,7 @@ impl LanguageServer for Backend {
Ok(Some(LspHover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
value: format!("```\n{name}\n```\n\n{docs}"),
value: format!("```\n{}\n```\n\n{}", name, docs),
}),
range: Some(range),
}))
@ -1157,17 +1153,17 @@ impl LanguageServer for Backend {
} => Ok(Some(LspHover {
contents: HoverContents::Markup(MarkupContent {
kind: MarkupKind::Markdown,
value: format!("```\n{name}: {ty}\n```"),
value: format!("```\n{}: {}\n```", name, ty),
}),
range: Some(range),
})),
Hover::Variable { name, ty: None, range } => Ok(with_cached_var(&name, |value| {
let mut text: String = format!("```\n{name}");
let mut text: String = format!("```\n{}", name);
if let Some(ty) = value.principal_type() {
text.push_str(&format!(": {}", ty.human_friendly_type()));
}
if let Some(v) = value.value_str() {
text.push_str(&format!(" = {v}"));
text.push_str(&format!(" = {}", v));
}
text.push_str("\n```");

View File

@ -13,8 +13,8 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, DiagnosticTag};
pub use util::IntoDiagnostic;
use crate::{
CompilationError,
errors::{Severity, Tag},
CompilationError,
};
impl IntoDiagnostic for CompilationError {

View File

@ -2,18 +2,18 @@ use std::collections::{BTreeMap, HashMap};
use pretty_assertions::assert_eq;
use tower_lsp::{
LanguageServer,
lsp_types::{
CodeActionKind, CodeActionOrCommand, Diagnostic, PrepareRenameResponse, SemanticTokenModifier,
SemanticTokenType, TextEdit, WorkspaceEdit,
},
LanguageServer,
};
use crate::{
SourceRange,
errors::{LspSuggestion, Suggestion},
lsp::test_util::{copilot_lsp_server, kcl_lsp_server},
parsing::ast::types::{Node, Program},
SourceRange,
};
#[track_caller]
@ -276,7 +276,11 @@ async fn test_updating_kcl_lsp_files() {
assert_eq!(server.code_map.len(), 11);
// Just make sure that one of the current files read from disk is accurate.
assert_eq!(
server.code_map.get(&format!("{string_path}/util.rs")).unwrap().clone(),
server
.code_map
.get(&format!("{}/util.rs", string_path))
.unwrap()
.clone(),
include_str!("util.rs").as_bytes()
);
}
@ -629,7 +633,7 @@ async fn test_kcl_lsp_create_zip() {
}
assert_eq!(files.len(), 12);
let util_path = format!("{string_path}/util.rs").replace("file://", "");
let util_path = format!("{}/util.rs", string_path).replace("file://", "");
assert!(files.contains_key(&util_path));
assert_eq!(files.get("/test.kcl"), Some(&4));
}
@ -2355,7 +2359,7 @@ async fn test_kcl_lsp_diagnostic_has_lints() {
assert_eq!(diagnostics.full_document_diagnostic_report.items.len(), 1);
assert_eq!(
diagnostics.full_document_diagnostic_report.items[0].message,
"Identifiers should be lowerCamelCase"
"Identifiers must be lowerCamelCase"
);
} else {
panic!("Expected full diagnostics");

View File

@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use crate::{
errors::{KclError, KclErrorDetails},
exec::KclValue,
execution::{EnvironmentRef, ModuleArtifactState, PreImportedGeometry, typed_path::TypedPath},
execution::{typed_path::TypedPath, EnvironmentRef, ModuleArtifactState, PreImportedGeometry},
fs::{FileManager, FileSystem},
parsing::ast::types::{ImportPath, Node, Program},
source_range::SourceRange,
@ -73,13 +73,13 @@ impl ModuleLoader {
}
pub(crate) fn enter_module(&mut self, path: &ModulePath) {
if let ModulePath::Local { value: path } = path {
if let ModulePath::Local { value: ref path } = path {
self.import_stack.push(path.clone());
}
}
pub(crate) fn leave_module(&mut self, path: &ModulePath) {
if let ModulePath::Local { value: path } = path {
if let ModulePath::Local { value: ref path } = path {
let popped = self.import_stack.pop().unwrap();
assert_eq!(path, &popped);
}

View File

@ -2,8 +2,8 @@ pub(crate) mod digest;
pub mod types;
use crate::{
ModuleId,
parsing::ast::types::{BinaryPart, BodyItem, Expr, LiteralIdentifier},
ModuleId,
};
impl BodyItem {

View File

@ -25,14 +25,15 @@ pub use crate::parsing::ast::types::{
none::KclNone,
};
use crate::{
ModuleId, TypedPath,
errors::KclError,
execution::{
KclValue, Metadata, TagIdentifier, annotations,
annotations,
types::{ArrayLen, UnitAngle, UnitLen},
KclValue, Metadata, TagIdentifier,
},
parsing::{PIPE_OPERATOR, ast::digest::Digest, token::NumericSuffix},
parsing::{ast::digest::Digest, token::NumericSuffix, PIPE_OPERATOR},
source_range::SourceRange,
ModuleId, TypedPath,
};
mod condition;
@ -71,18 +72,18 @@ impl<T: JsonSchema> schemars::JsonSchema for Node<T> {
T::schema_name()
}
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
let mut child = T::json_schema(r#gen).into_object();
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
let mut child = T::json_schema(gen).into_object();
// We want to add the start and end fields to the schema.
// Ideally we would add _any_ extra fields from the Node type automatically
// but this is a bit hard since this isn't a macro.
let Some(object) = &mut child.object else {
let Some(ref mut object) = &mut child.object else {
// This should never happen. But it will panic at compile time of docs if it does.
// Which is better than runtime.
panic!("Expected object schema for {}", T::schema_name());
};
object.properties.insert("start".to_string(), usize::json_schema(r#gen));
object.properties.insert("end".to_string(), usize::json_schema(r#gen));
object.properties.insert("start".to_string(), usize::json_schema(gen));
object.properties.insert("end".to_string(), usize::json_schema(gen));
schemars::schema::Schema::Object(child.clone())
}
@ -680,7 +681,7 @@ impl Program {
break;
}
}
BodyItem::VariableDeclaration(variable_declaration) => {
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
if let Some(var_old_name) = variable_declaration.rename_symbol(new_name, pos) {
old_name = Some(var_old_name);
break;
@ -704,16 +705,18 @@ impl Program {
// Recurse over the item.
let mut value = match item {
BodyItem::ImportStatement(_) => None, // TODO
BodyItem::ExpressionStatement(expression_statement) => Some(&mut expression_statement.expression),
BodyItem::VariableDeclaration(variable_declaration) => {
BodyItem::ExpressionStatement(ref mut expression_statement) => {
Some(&mut expression_statement.expression)
}
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
variable_declaration.get_mut_expr_for_position(pos)
}
BodyItem::TypeDeclaration(_) => None,
BodyItem::ReturnStatement(return_statement) => Some(&mut return_statement.argument),
BodyItem::ReturnStatement(ref mut return_statement) => Some(&mut return_statement.argument),
};
// Check if we have a function expression.
if let Some(Expr::FunctionExpression(function_expression)) = &mut value {
if let Some(Expr::FunctionExpression(ref mut function_expression)) = &mut value {
// Check if the params to the function expression contain the position.
for param in &mut function_expression.params {
let param_source_range: SourceRange = (&param.identifier).into();
@ -761,7 +764,7 @@ impl Program {
BodyItem::ExpressionStatement(_) => {
continue;
}
BodyItem::VariableDeclaration(variable_declaration) => {
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
if variable_declaration.declaration.id.name == name {
variable_declaration.declaration = declarator;
return;
@ -780,14 +783,14 @@ impl Program {
for item in &mut self.body {
match item {
BodyItem::ImportStatement(_) => {} // TODO
BodyItem::ExpressionStatement(expression_statement) => expression_statement
BodyItem::ExpressionStatement(ref mut expression_statement) => expression_statement
.expression
.replace_value(source_range, new_value.clone()),
BodyItem::VariableDeclaration(variable_declaration) => {
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
variable_declaration.replace_value(source_range, new_value.clone())
}
BodyItem::TypeDeclaration(_) => {}
BodyItem::ReturnStatement(return_statement) => {
BodyItem::ReturnStatement(ref mut return_statement) => {
return_statement.argument.replace_value(source_range, new_value.clone())
}
}
@ -1037,18 +1040,18 @@ impl Expr {
}
match self {
Expr::BinaryExpression(bin_exp) => bin_exp.replace_value(source_range, new_value),
Expr::ArrayExpression(array_exp) => array_exp.replace_value(source_range, new_value),
Expr::ArrayRangeExpression(array_range) => array_range.replace_value(source_range, new_value),
Expr::ObjectExpression(obj_exp) => obj_exp.replace_value(source_range, new_value),
Expr::BinaryExpression(ref mut bin_exp) => bin_exp.replace_value(source_range, new_value),
Expr::ArrayExpression(ref mut array_exp) => array_exp.replace_value(source_range, new_value),
Expr::ArrayRangeExpression(ref mut array_range) => array_range.replace_value(source_range, new_value),
Expr::ObjectExpression(ref mut obj_exp) => obj_exp.replace_value(source_range, new_value),
Expr::MemberExpression(_) => {}
Expr::Literal(_) => {}
Expr::FunctionExpression(func_exp) => func_exp.replace_value(source_range, new_value),
Expr::CallExpressionKw(call_exp) => call_exp.replace_value(source_range, new_value),
Expr::FunctionExpression(ref mut func_exp) => func_exp.replace_value(source_range, new_value),
Expr::CallExpressionKw(ref mut call_exp) => call_exp.replace_value(source_range, new_value),
Expr::Name(_) => {}
Expr::TagDeclarator(_) => {}
Expr::PipeExpression(pipe_exp) => pipe_exp.replace_value(source_range, new_value),
Expr::UnaryExpression(unary_exp) => unary_exp.replace_value(source_range, new_value),
Expr::PipeExpression(ref mut pipe_exp) => pipe_exp.replace_value(source_range, new_value),
Expr::UnaryExpression(ref mut unary_exp) => unary_exp.replace_value(source_range, new_value),
Expr::IfExpression(_) => {}
Expr::PipeSubstitution(_) => {}
Expr::LabelledExpression(expr) => expr.expr.replace_value(source_range, new_value),
@ -1110,19 +1113,25 @@ impl Expr {
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
match self {
Expr::Literal(_literal) => {}
Expr::Name(identifier) => identifier.rename(old_name, new_name),
Expr::TagDeclarator(tag) => tag.rename(old_name, new_name),
Expr::BinaryExpression(binary_expression) => binary_expression.rename_identifiers(old_name, new_name),
Expr::Name(ref mut identifier) => identifier.rename(old_name, new_name),
Expr::TagDeclarator(ref mut tag) => tag.rename(old_name, new_name),
Expr::BinaryExpression(ref mut binary_expression) => {
binary_expression.rename_identifiers(old_name, new_name)
}
Expr::FunctionExpression(_function_identifier) => {}
Expr::CallExpressionKw(call_expression) => call_expression.rename_identifiers(old_name, new_name),
Expr::PipeExpression(pipe_expression) => pipe_expression.rename_identifiers(old_name, new_name),
Expr::CallExpressionKw(ref mut call_expression) => call_expression.rename_identifiers(old_name, new_name),
Expr::PipeExpression(ref mut pipe_expression) => pipe_expression.rename_identifiers(old_name, new_name),
Expr::PipeSubstitution(_) => {}
Expr::ArrayExpression(array_expression) => array_expression.rename_identifiers(old_name, new_name),
Expr::ArrayRangeExpression(array_range) => array_range.rename_identifiers(old_name, new_name),
Expr::ObjectExpression(object_expression) => object_expression.rename_identifiers(old_name, new_name),
Expr::MemberExpression(member_expression) => member_expression.rename_identifiers(old_name, new_name),
Expr::UnaryExpression(unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
Expr::IfExpression(expr) => expr.rename_identifiers(old_name, new_name),
Expr::ArrayExpression(ref mut array_expression) => array_expression.rename_identifiers(old_name, new_name),
Expr::ArrayRangeExpression(ref mut array_range) => array_range.rename_identifiers(old_name, new_name),
Expr::ObjectExpression(ref mut object_expression) => {
object_expression.rename_identifiers(old_name, new_name)
}
Expr::MemberExpression(ref mut member_expression) => {
member_expression.rename_identifiers(old_name, new_name)
}
Expr::UnaryExpression(ref mut unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
Expr::IfExpression(ref mut expr) => expr.rename_identifiers(old_name, new_name),
Expr::LabelledExpression(expr) => expr.expr.rename_identifiers(old_name, new_name),
Expr::AscribedExpression(expr) => expr.expr.rename_identifiers(old_name, new_name),
Expr::None(_) => {}
@ -1316,9 +1325,15 @@ impl BinaryPart {
match self {
BinaryPart::Literal(_) => {}
BinaryPart::Name(_) => {}
BinaryPart::BinaryExpression(binary_expression) => binary_expression.replace_value(source_range, new_value),
BinaryPart::CallExpressionKw(call_expression) => call_expression.replace_value(source_range, new_value),
BinaryPart::UnaryExpression(unary_expression) => unary_expression.replace_value(source_range, new_value),
BinaryPart::BinaryExpression(ref mut binary_expression) => {
binary_expression.replace_value(source_range, new_value)
}
BinaryPart::CallExpressionKw(ref mut call_expression) => {
call_expression.replace_value(source_range, new_value)
}
BinaryPart::UnaryExpression(ref mut unary_expression) => {
unary_expression.replace_value(source_range, new_value)
}
BinaryPart::MemberExpression(_) => {}
BinaryPart::IfExpression(e) => e.replace_value(source_range, new_value),
BinaryPart::AscribedExpression(e) => e.expr.replace_value(source_range, new_value),
@ -1355,13 +1370,21 @@ impl BinaryPart {
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
match self {
BinaryPart::Literal(_literal) => {}
BinaryPart::Name(identifier) => identifier.rename(old_name, new_name),
BinaryPart::BinaryExpression(binary_expression) => binary_expression.rename_identifiers(old_name, new_name),
BinaryPart::CallExpressionKw(call_expression) => call_expression.rename_identifiers(old_name, new_name),
BinaryPart::UnaryExpression(unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
BinaryPart::MemberExpression(member_expression) => member_expression.rename_identifiers(old_name, new_name),
BinaryPart::IfExpression(if_expression) => if_expression.rename_identifiers(old_name, new_name),
BinaryPart::AscribedExpression(e) => e.expr.rename_identifiers(old_name, new_name),
BinaryPart::Name(ref mut identifier) => identifier.rename(old_name, new_name),
BinaryPart::BinaryExpression(ref mut binary_expression) => {
binary_expression.rename_identifiers(old_name, new_name)
}
BinaryPart::CallExpressionKw(ref mut call_expression) => {
call_expression.rename_identifiers(old_name, new_name)
}
BinaryPart::UnaryExpression(ref mut unary_expression) => {
unary_expression.rename_identifiers(old_name, new_name)
}
BinaryPart::MemberExpression(ref mut member_expression) => {
member_expression.rename_identifiers(old_name, new_name)
}
BinaryPart::IfExpression(ref mut if_expression) => if_expression.rename_identifiers(old_name, new_name),
BinaryPart::AscribedExpression(ref mut e) => e.expr.rename_identifiers(old_name, new_name),
}
}
}
@ -2801,7 +2824,7 @@ impl MemberExpression {
self.object.rename_identifiers(old_name, new_name);
match &mut self.property {
LiteralIdentifier::Identifier(identifier) => identifier.rename(old_name, new_name),
LiteralIdentifier::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
LiteralIdentifier::Literal(_) => {}
}
}
@ -3289,7 +3312,7 @@ impl Type {
.map(|t| t.human_friendly_type())
.collect::<Vec<_>>()
.join(" or "),
Type::Object { .. } => format!("an object with fields `{self}`"),
Type::Object { .. } => format!("an object with fields `{}`", self),
}
}
@ -3446,11 +3469,7 @@ pub struct RequiredParamAfterOptionalParam(pub Box<Parameter>);
impl std::fmt::Display for RequiredParamAfterOptionalParam {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"KCL functions must declare any optional parameters after all the required parameters. But your required parameter {} is _after_ an optional parameter. You must move it to before the optional parameters instead.",
self.0.identifier.name
)
write!(f, "KCL functions must declare any optional parameters after all the required parameters. But your required parameter {} is _after_ an optional parameter. You must move it to before the optional parameters instead.", self.0.identifier.name)
}
}

View File

@ -3,8 +3,8 @@
use super::CompilationError;
use crate::{
SourceRange,
parsing::ast::types::{BinaryExpression, BinaryOperator, BinaryPart, Node},
SourceRange,
};
/// Parses a list of tokens (in infix order, i.e. as the user typed them)
@ -127,11 +127,11 @@ impl From<BinaryOperator> for BinaryExpressionToken {
mod tests {
use super::*;
use crate::{
ModuleId,
parsing::{
ast::types::{Literal, LiteralValue},
token::NumericSuffix,
},
ModuleId,
};
#[test]

View File

@ -1,11 +1,11 @@
use crate::{
ModuleId,
errors::{CompilationError, KclError, KclErrorDetails},
parsing::{
ast::types::{Node, Program},
token::TokenStream,
},
source_range::SourceRange,
ModuleId,
};
pub(crate) mod ast;
@ -18,7 +18,7 @@ pub const PIPE_OPERATOR: &str = "|>";
// `?` like behavior for `Result`s to return a ParseResult if there is an error.
macro_rules! pr_try {
($e: expr_2021) => {
($e: expr) => {
match $e {
Ok(a) => a,
Err(e) => return e.into(),
@ -187,7 +187,7 @@ pub fn deprecation(s: &str, kind: DeprecationKind) -> Option<&'static str> {
#[cfg(test)]
mod tests {
macro_rules! parse_and_lex {
($func_name:ident, $test_kcl_program:expr_2021) => {
($func_name:ident, $test_kcl_program:expr) => {
#[test]
fn $func_name() {
let _ = crate::parsing::top_level_parse($test_kcl_program);

View File

@ -14,16 +14,14 @@ use winnow::{
};
use super::{
DeprecationKind,
ast::types::{AscribedExpression, ImportPath, LabelledExpression},
token::{NumericSuffix, RESERVED_WORDS},
DeprecationKind,
};
use crate::{
IMPORT_FILE_EXTENSIONS, SourceRange, TypedPath,
errors::{CompilationError, Severity, Tag},
execution::types::ArrayLen,
parsing::{
PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR,
ast::types::{
Annotation, ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryOperator, BinaryPart, BodyItem,
BoxNode, CallExpressionKw, CommentStyle, DefaultParamVal, ElseIf, Expr, ExpressionStatement,
@ -35,7 +33,9 @@ use crate::{
},
math::BinaryExpressionToken,
token::{Token, TokenSlice, TokenType},
PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR,
},
SourceRange, TypedPath, IMPORT_FILE_EXTENSIONS,
};
thread_local! {
@ -602,7 +602,7 @@ fn binary_operator(i: &mut TokenSlice) -> ModalResult<BinaryOperator> {
return Err(CompilationError::fatal(
token.as_source_range(),
format!("{} is not a binary operator", token.value.as_str()),
));
))
}
};
Ok(op)
@ -726,7 +726,7 @@ fn shebang(i: &mut TokenSlice) -> ModalResult<Node<Shebang>> {
opt(whitespace).parse_next(i)?;
Ok(Node::new(
Shebang::new(format!("#!{value}")),
Shebang::new(format!("#!{}", value)),
0,
tokens.last().unwrap().end,
tokens.first().unwrap().module_id,
@ -1926,7 +1926,7 @@ fn validate_path_string(path_string: String, var_name: bool, path_range: SourceR
return Err(ErrMode::Cut(
CompilationError::fatal(
path_range,
format!("Invalid import path for import from std: {path_string}."),
format!("Invalid import path for import from std: {}.", path_string),
)
.into(),
));
@ -1938,10 +1938,7 @@ fn validate_path_string(path_string: String, var_name: bool, path_range: SourceR
if !IMPORT_FILE_EXTENSIONS.contains(&extn.to_string_lossy().to_string()) {
ParseContext::warn(CompilationError::err(
path_range,
format!(
"unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}",
IMPORT_FILE_EXTENSIONS.join(", ")
),
format!("unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}", IMPORT_FILE_EXTENSIONS.join(", ")),
))
}
ImportPath::Foreign {
@ -2213,7 +2210,7 @@ fn declaration(i: &mut TokenSlice) -> ModalResult<BoxNode<VariableDeclaration>>
if matches!(val, Expr::FunctionExpression(_)) {
return Err(CompilationError::fatal(
SourceRange::new(start, dec_end, id.module_id),
format!("Expected a `fn` variable kind, found: `{kind}`"),
format!("Expected a `fn` variable kind, found: `{}`", kind),
));
}
Ok(val)
@ -3315,10 +3312,10 @@ fn fn_call_kw(i: &mut TokenSlice) -> ModalResult<Node<CallExpressionKw>> {
ParseContext::warn(
CompilationError::err(
result.as_source_range(),
format!("Calling `{callee_str}` is deprecated, prefer using `{suggestion}`."),
format!("Calling `{}` is deprecated, prefer using `{}`.", callee_str, suggestion),
)
.with_suggestion(
format!("Replace `{callee_str}` with `{suggestion}`"),
format!("Replace `{}` with `{}`", callee_str, suggestion),
suggestion,
None,
Tag::Deprecated,
@ -3336,13 +3333,13 @@ mod tests {
use super::*;
use crate::{
ModuleId,
parsing::ast::types::{BodyItem, Expr, VariableKind},
ModuleId,
};
fn assert_reserved(word: &str) {
// Try to use it as a variable name.
let code = format!(r#"{word} = 0"#);
let code = format!(r#"{} = 0"#, word);
let result = crate::parsing::top_level_parse(code.as_str());
let err = &result.unwrap_errs().next().unwrap();
// Which token causes the error may change. In "return = 0", for
@ -5266,7 +5263,7 @@ mod snapshot_math_tests {
// The macro takes a KCL program, ensures it tokenizes and parses, then compares
// its parsed AST to a snapshot (kept in this repo in a file under snapshots/ dir)
macro_rules! snapshot_test {
($func_name:ident, $test_kcl_program:expr_2021) => {
($func_name:ident, $test_kcl_program:expr) => {
#[test]
fn $func_name() {
let module_id = crate::ModuleId::default();
@ -5304,7 +5301,7 @@ mod snapshot_tests {
// The macro takes a KCL program, ensures it tokenizes and parses, then compares
// its parsed AST to a snapshot (kept in this repo in a file under snapshots/ dir)
macro_rules! snapshot_test {
($func_name:ident, $test_kcl_program:expr_2021) => {
($func_name:ident, $test_kcl_program:expr) => {
#[test]
fn $func_name() {
let module_id = crate::ModuleId::default();

View File

@ -16,10 +16,10 @@ use winnow::{
};
use crate::{
CompilationError, ModuleId,
errors::KclError,
parsing::ast::types::{ItemVisibility, VariableKind},
source_range::SourceRange,
CompilationError, ModuleId,
};
mod tokeniser;
@ -609,7 +609,7 @@ impl From<ParseError<Input<'_>, winnow::error::ContextError>> for KclError {
// TODO: Add the Winnow parser context to the error.
// See https://github.com/KittyCAD/modeling-app/issues/784
KclError::new_lexical(crate::errors::KclErrorDetails::new(
format!("found unknown token '{bad_token}'"),
format!("found unknown token '{}'", bad_token),
vec![SourceRange::new(offset, offset + 1, module_id)],
))
}

View File

@ -1,19 +1,19 @@
use fnv::FnvHashMap;
use lazy_static::lazy_static;
use winnow::{
LocatingSlice, Stateful,
ascii::{digit1, multispace1},
combinator::{alt, opt, peek, preceded, repeat},
error::{ContextError, ParseError},
prelude::*,
stream::{Location, Stream},
token::{any, none_of, take_till, take_until, take_while},
LocatingSlice, Stateful,
};
use super::TokenStream;
use crate::{
ModuleId,
parsing::token::{Token, TokenType},
ModuleId,
};
lazy_static! {

View File

@ -1,9 +1,9 @@
use std::path::PathBuf;
use schemars::{JsonSchema, r#gen::SchemaGenerator};
use serde_json::{Value, json};
use schemars::{gen::SchemaGenerator, JsonSchema};
use serde_json::{json, Value};
use crate::settings::types::{Configuration, project::ProjectConfiguration};
use crate::settings::types::{project::ProjectConfiguration, Configuration};
// Project settings example in TOML format
const PROJECT_SETTINGS_EXAMPLE: &str = r#"[settings.app]
@ -60,7 +60,7 @@ fn init_handlebars() -> handlebars::Handlebars<'static> {
let pretty_options = array
.iter()
.filter_map(|v| v.as_str())
.map(|s| format!("`{s}`"))
.map(|s| format!("`{}`", s))
.collect::<Vec<_>>()
.join(", ");
out.write(&pretty_options)?;
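
A standalone sketch of what this helper produces for a small options array (serde_json values stand in for the template data; not part of the diff):

fn main() {
    let array = vec![serde_json::json!("mm"), serde_json::json!("in")];
    let pretty_options = array
        .iter()
        .filter_map(|v| v.as_str())
        .map(|s| format!("`{s}`"))
        .collect::<Vec<_>>()
        .join(", ");
    assert_eq!(pretty_options, "`mm`, `in`");
}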
@ -89,17 +89,17 @@ fn init_handlebars() -> handlebars::Handlebars<'static> {
Value::Null => out.write("None")?,
Value::Bool(b) => out.write(&b.to_string())?,
Value::Number(n) => out.write(&n.to_string())?,
Value::String(s) => out.write(&format!("`{s}`"))?,
Value::String(s) => out.write(&format!("`{}`", s))?,
Value::Array(arr) => {
let formatted = arr
.iter()
.map(|v| match v {
Value::String(s) => format!("`{s}`"),
_ => format!("{v}"),
Value::String(s) => format!("`{}`", s),
_ => format!("{}", v),
})
.collect::<Vec<_>>()
.join(", ");
out.write(&format!("[{formatted}]"))?;
out.write(&format!("[{}]", formatted))?;
}
Value::Object(_) => out.write("(complex default)")?,
}
@ -122,7 +122,7 @@ pub fn generate_settings_docs() {
let hbs = init_handlebars();
// Generate project settings documentation
let mut settings = schemars::r#gen::SchemaSettings::default();
let mut settings = schemars::gen::SchemaSettings::default();
settings.inline_subschemas = true;
settings.meta_schema = None; // We don't need the meta schema for docs
settings.option_nullable = false; // Important - makes Option fields show properly

View File

@ -716,15 +716,13 @@ enable_ssao = false
let result = color.validate();
if let Ok(r) = result {
panic!("Expected an error, but got success: {r:?}");
panic!("Expected an error, but got success: {:?}", r);
}
assert!(result.is_err());
assert!(
result
assert!(result
.unwrap_err()
.to_string()
.contains("color: Validation error: color")
);
.contains("color: Validation error: color"));
let appearance = AppearanceSettings {
theme: AppTheme::System,
@ -732,15 +730,13 @@ enable_ssao = false
};
let result = appearance.validate();
if let Ok(r) = result {
panic!("Expected an error, but got success: {r:?}");
panic!("Expected an error, but got success: {:?}", r);
}
assert!(result.is_err());
assert!(
result
assert!(result
.unwrap_err()
.to_string()
.contains("color: Validation error: color")
);
.contains("color: Validation error: color"));
}
#[test]
@ -750,15 +746,13 @@ color = 1567.4"#;
let result = Configuration::parse_and_validate(settings_file);
if let Ok(r) = result {
panic!("Expected an error, but got success: {r:?}");
panic!("Expected an error, but got success: {:?}", r);
}
assert!(result.is_err());
assert!(
result
assert!(result
.unwrap_err()
.to_string()
.contains("color: Validation error: color")
);
.contains("color: Validation error: color"));
}
}

View File

@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use validator::Validate;
use crate::settings::types::{
AppColor, CommandBarSettings, DefaultTrue, OnboardingStatus, TextEditorSettings, UnitLength, is_default,
is_default, AppColor, CommandBarSettings, DefaultTrue, OnboardingStatus, TextEditorSettings, UnitLength,
};
/// Project specific settings for the app.
@ -203,16 +203,14 @@ color = 1567.4"#;
let result = ProjectConfiguration::parse_and_validate(settings_file);
if let Ok(r) = result {
panic!("Expected an error, but got success: {r:?}");
panic!("Expected an error, but got success: {:?}", r);
}
assert!(result.is_err());
assert!(
result
assert!(result
.unwrap_err()
.to_string()
.contains("color: Validation error: color")
);
.contains("color: Validation error: color"));
}
#[test]

View File

@ -1,14 +1,14 @@
use std::{
panic::{AssertUnwindSafe, catch_unwind},
panic::{catch_unwind, AssertUnwindSafe},
path::{Path, PathBuf},
};
use indexmap::IndexMap;
use crate::{
ExecOutcome, ExecState, ExecutorContext, ModuleId,
errors::KclError,
execution::{EnvironmentRef, ModuleArtifactState},
ExecOutcome, ExecState, ExecutorContext, ModuleId,
};
#[cfg(feature = "artifact-graph")]
use crate::{
@ -241,10 +241,7 @@ async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
Ok((exec_state, ctx, env_ref, png, step)) => {
let fail_path = test.output_dir.join("execution_error.snap");
if std::fs::exists(&fail_path).unwrap() {
panic!(
"This test case is expected to fail, but it passed. If this is intended, and the test should actually be passing now, please delete kcl-lib/{}",
fail_path.to_string_lossy()
)
panic!("This test case is expected to fail, but it passed. If this is intended, and the test should actually be passing now, please delete kcl-lib/{}", fail_path.to_string_lossy())
}
if render_to_png {
twenty_twenty::assert_image(test.output_dir.join(RENDERED_MODEL_NAME), &png, 0.99);
@ -290,13 +287,10 @@ async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
let report = error.clone().into_miette_report_with_outputs(&input).unwrap();
let report = miette::Report::new(report);
if previously_passed {
eprintln!(
"This test case failed, but it previously passed. If this is intended, and the test should actually be failing now, please delete kcl-lib/{} and other associated passing artifacts",
ok_path.to_string_lossy()
);
eprintln!("This test case failed, but it previously passed. If this is intended, and the test should actually be failing now, please delete kcl-lib/{} and other associated passing artifacts", ok_path.to_string_lossy());
panic!("{report:?}");
}
let report = format!("{report:?}");
let report = format!("{:?}", report);
let err_result = catch_unwind(AssertUnwindSafe(|| {
assert_snapshot(test, "Error from executing", || {

View File

@ -1,7 +1,7 @@
//! Run all the KCL samples in the `kcl_samples` directory.
use std::{
fs,
panic::{AssertUnwindSafe, catch_unwind},
panic::{catch_unwind, AssertUnwindSafe},
path::{Path, PathBuf},
};
@ -86,11 +86,7 @@ fn test_after_engine_ensure_kcl_samples_manifest_etc() {
.into_iter()
.filter(|name| !input_names.contains(name))
.collect::<Vec<_>>();
assert!(
missing.is_empty(),
"Expected input kcl-samples for the following. If these are no longer tests, delete the expected output directories for them in {}: {missing:?}",
OUTPUTS_DIR.to_string_lossy()
);
assert!(missing.is_empty(), "Expected input kcl-samples for the following. If these are no longer tests, delete the expected output directories for them in {}: {missing:?}", OUTPUTS_DIR.to_string_lossy());
// We want to move the screenshot for the inputs to the public/kcl-samples
// directory so that they can be used as inputs for the next run.
@ -193,7 +189,7 @@ fn kcl_samples_inputs() -> Vec<Test> {
let entry_point = if main_kcl_path.exists() {
main_kcl_path
} else {
panic!("No main.kcl found in {sub_dir:?}");
panic!("No main.kcl found in {:?}", sub_dir);
};
tests.push(test(&dir_name_str, entry_point));
}

View File

@ -1,7 +1,7 @@
//! Standard library appearance.
use anyhow::Result;
use kcmc::{ModelingCmd, each_cmd as mcmd};
use kcmc::{each_cmd as mcmd, ModelingCmd};
use kittycad_modeling_cmds::{self as kcmc, shared::Color};
use regex::Regex;
use rgba_simple::Hex;
@ -10,8 +10,8 @@ use super::args::TyF64;
use crate::{
errors::{KclError, KclErrorDetails},
execution::{
ExecState, KclValue, SolidOrImportedGeometry,
types::{ArrayLen, RuntimeType},
ExecState, KclValue, SolidOrImportedGeometry,
},
std::Args,
};
@ -63,7 +63,7 @@ pub async fn appearance(exec_state: &mut ExecState, args: Args) -> Result<KclVal
// Make sure the color if set is valid.
if !HEX_REGEX.is_match(&color) {
return Err(KclError::new_semantic(KclErrorDetails::new(
format!("Invalid hex color (`{color}`), try something like `#fff000`"),
format!("Invalid hex color (`{}`), try something like `#fff000`", color),
vec![args.source_range],
)));
}
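
HEX_REGEX itself is defined elsewhere and is not shown in this hunk; a plausible standalone check in the same spirit (the exact pattern below is an assumption, not the library's):

use regex::Regex;

fn is_valid_hex_color(color: &str) -> bool {
    // Assumed pattern: '#' followed by six hex digits, e.g. "#fff000".
    // The real HEX_REGEX in kcl-lib may accept other forms.
    Regex::new(r"^#[0-9a-fA-F]{6}$").unwrap().is_match(color)
}

fn main() {
    assert!(is_valid_hex_color("#fff000"));
    assert!(!is_valid_hex_color("fff000"));
}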

View File

@ -7,13 +7,12 @@ use serde::Serialize;
use super::fillet::EdgeReference;
pub use crate::execution::fn_call::Args;
use crate::{
ModuleId,
errors::{KclError, KclErrorDetails},
execution::{
ExecState, ExtrudeSurface, Helix, KclObjectFields, KclValue, Metadata, PlaneInfo, Sketch, SketchSurface, Solid,
TagIdentifier,
kcl_value::FunctionSource,
types::{NumericType, PrimitiveType, RuntimeType, UnitAngle, UnitLen, UnitType},
ExecState, ExtrudeSurface, Helix, KclObjectFields, KclValue, Metadata, PlaneInfo, Sketch, SketchSurface, Solid,
TagIdentifier,
},
parsing::ast::types::TagNode,
source_range::SourceRange,
@ -22,6 +21,7 @@ use crate::{
sketch::FaceTag,
sweep::SweepPath,
},
ModuleId,
};
const ERROR_STRING_SKETCH_TO_SOLID_HELPER: &str =
@ -97,8 +97,8 @@ impl JsonSchema for TyF64 {
"TyF64".to_string()
}
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
r#gen.subschema_for::<f64>()
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
gen.subschema_for::<f64>()
}
}

View File

@ -1,15 +1,15 @@
use indexmap::IndexMap;
use crate::{
ExecutorContext,
errors::{KclError, KclErrorDetails},
execution::{
ExecState,
fn_call::{Arg, Args, KwArgs},
kcl_value::{FunctionSource, KclValue},
types::RuntimeType,
ExecState,
},
source_range::SourceRange,
ExecutorContext,
};
/// Apply a function to each element of an array.

View File

@ -5,14 +5,14 @@ use anyhow::Result;
use super::args::TyF64;
use crate::{
errors::{KclError, KclErrorDetails},
execution::{ExecState, KclValue, types::RuntimeType},
execution::{types::RuntimeType, ExecState, KclValue},
std::Args,
};
async fn _assert(value: bool, message: &str, args: &Args) -> Result<(), KclError> {
if !value {
return Err(KclError::new_type(KclErrorDetails::new(
format!("assert failed: {message}"),
format!("assert failed: {}", message),
vec![args.source_range],
)));
}

Some files were not shown because too many files have changed in this diff