Compare commits
21 Commits: api-deux-p ... nadro/adho

SHA1
53a646d12a
85c721fb49
27af2d08a3
fb8b975b5e
62d8d45a58
ae3440df0a
af658c909d
7ec11d23c8
30000a1eac
cb3b45747c
fe66310f2d
fefb6cfe87
0f8375cbb4
107adc77b3
4356885aa2
6a2027cd51
e35bcf2f11
939d5ef3f7
8d19a955af
625394d587
e6dd628736
.github/workflows/build-apps.yml (vendored): 2 lines changed

@@ -43,7 +43,7 @@ jobs:
- name: Download Wasm Cache
id: download-wasm
if: ${{ github.event_name == 'pull_request' && steps.filter.outputs.rust == 'false' }}
uses: dawidd6/action-download-artifact@v7
uses: dawidd6/action-download-artifact@v11
continue-on-error: true
with:
github_token: ${{secrets.GITHUB_TOKEN}}
.github/workflows/cargo-test.yml (vendored): 12 lines changed

@@ -25,8 +25,8 @@ jobs:
- runner=8cpu-linux-x64
- extras=s3-cache
steps:
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
- uses: runs-on/action@v2
- uses: actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}

@@ -149,8 +149,8 @@ jobs:
partitionIndex: [1, 2, 3, 4, 5, 6]
partitionTotal: [6]
steps:
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
- uses: runs-on/action@v2
- uses: actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}

@@ -207,8 +207,8 @@ jobs:
- runner=32cpu-linux-x64
- extras=s3-cache
steps:
- uses: runs-on/action@v1
- uses: actions/create-github-app-token@v1
- uses: runs-on/action@v2
- uses: actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
.github/workflows/e2e-tests.yml (vendored): 6 lines changed

@@ -46,7 +46,7 @@ jobs:
- name: Download Wasm cache
id: download-wasm
if: ${{ github.event_name != 'schedule' && steps.filter.outputs.rust == 'false' }}
uses: dawidd6/action-download-artifact@v7
uses: dawidd6/action-download-artifact@v11
continue-on-error: true
with:
github_token: ${{secrets.GITHUB_TOKEN}}

@@ -110,7 +110,7 @@ jobs:
steps:
- uses: actions/create-github-app-token@v1
- uses: actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}

@@ -230,7 +230,7 @@ jobs:
steps:
- uses: actions/create-github-app-token@v1
- uses: actions/create-github-app-token@v2
id: app-token
with:
app-id: ${{ secrets.MODELING_APP_GH_APP_ID }}
.github/workflows/generate-website-docs.yml (vendored): 2 lines changed

@@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/create-github-app-token@v1
- uses: actions/create-github-app-token@v2
id: app-token
with:
# required
.github/workflows/kcl-python-bindings.yml (vendored): 6 lines changed

@@ -113,7 +113,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v6
- uses: actions-rust-lang/setup-rust-toolchain@v1
- uses: taiki-e/install-action@just
- name: Run tests

@@ -130,7 +130,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v6
- name: Install codespell
run: |
uv venv .venv

@@ -161,7 +161,7 @@ jobs:
with:
path: rust/kcl-python-bindings
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v6
- name: do uv things
run: |
cd rust/kcl-python-bindings
@@ -5,6 +5,7 @@ import { uuidv4 } from '@src/lib/utils'
import type { HomePageFixture } from '@e2e/playwright/fixtures/homePageFixture'
import type { SceneFixture } from '@e2e/playwright/fixtures/sceneFixture'
import type { ToolbarFixture } from '@e2e/playwright/fixtures/toolbarFixture'
import type { CmdBarFixture } from '@e2e/playwright/fixtures/cmdBarFixture'
import { getUtils } from '@e2e/playwright/test-utils'
import { expect, test } from '@e2e/playwright/zoo-test'

@@ -14,13 +15,18 @@ test.describe('Can create sketches on all planes and their back sides', () => {
homePage: HomePageFixture,
scene: SceneFixture,
toolbar: ToolbarFixture,
cmdBar: CmdBarFixture,
plane: string,
clickCoords: { x: number; y: number }
) => {
const u = await getUtils(page)
// await page.addInitScript(() => {
// localStorage.setItem('persistCode', '@settings(defaultLengthUnit = in)')
// })
await page.setBodyDimensions({ width: 1200, height: 500 })

await homePage.goToModelingScene()
// await scene.settled(cmdBar)
const XYPlanRed: [number, number, number] = [98, 50, 51]
await scene.expectPixelColor(XYPlanRed, { x: 700, y: 300 }, 15)

@@ -119,12 +125,166 @@ test.describe('Can create sketches on all planes and their back sides', () => {
]

for (const config of planeConfigs) {
test(config.plane, async ({ page, homePage, scene, toolbar }) => {
test(config.plane, async ({ page, homePage, scene, toolbar, cmdBar }) => {
await sketchOnPlaneAndBackSideTest(
page,
homePage,
scene,
toolbar,
cmdBar,
config.plane,
config.coords
)
})
}
})
test.describe('Can create sketches on offset planes and their back sides', () => {
const sketchOnPlaneAndBackSideTest = async (
page: Page,
homePage: HomePageFixture,
scene: SceneFixture,
toolbar: ToolbarFixture,
cmdbar: CmdBarFixture,
plane: string,
clickCoords: { x: number; y: number }
) => {
const u = await getUtils(page)
await page.addInitScript(() => {
localStorage.setItem(
'persistCode',
`@settings(defaultLengthUnit = in)
xyPlane = offsetPlane(XY, offset = 0.05)
xzPlane = offsetPlane(XZ, offset = 0.05)
yzPlane = offsetPlane(YZ, offset = 0.05)
`
)
})
await page.setBodyDimensions({ width: 1200, height: 500 })

await homePage.goToModelingScene()
// await scene.settled(cmdbar)
const XYPlanRed: [number, number, number] = [74, 74, 74]
await scene.expectPixelColor(XYPlanRed, { x: 700, y: 300 }, 15)

await u.openDebugPanel()

const coord =
plane === '-XY' || plane === '-YZ' || plane === 'XZ' ? -100 : 100
const camCommand: EngineCommand = {
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_look_at',
center: { x: 0, y: 0, z: 0 },
vantage: { x: coord, y: coord, z: coord },
up: { x: 0, y: 0, z: 1 },
},
}
const updateCamCommand: EngineCommand = {
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'default_camera_get_settings',
},
}

const prefix = plane.length === 3 ? '-' : ''
const planeName = plane
.slice(plane.length === 3 ? 1 : 0)
.toLocaleLowerCase()

const codeLine1 = `sketch001 = startSketchOn(${prefix}${planeName}Plane)`
const codeLine2 = `profile001 = startProfile(sketch001, at = [${0.91 + (plane[0] === '-' ? 0.01 : 0)}, -${1.21 + (plane[0] === '-' ? 0.01 : 0)}])`

await u.openDebugPanel()

await u.clearCommandLogs()
await page.getByRole('button', { name: 'Start Sketch' }).click()

await u.sendCustomCmd(camCommand)
await page.waitForTimeout(100)
await u.sendCustomCmd(updateCamCommand)

await u.closeDebugPanel()

await toolbar.openFeatureTreePane()
await toolbar.getDefaultPlaneVisibilityButton('XY').click()
await toolbar.getDefaultPlaneVisibilityButton('XZ').click()
await toolbar.getDefaultPlaneVisibilityButton('YZ').click()
await expect(
toolbar
.getDefaultPlaneVisibilityButton('YZ')
.locator('[aria-label="eye crossed out"]')
).toBeVisible()

await page.mouse.click(clickCoords.x, clickCoords.y)
await page.waitForTimeout(600) // wait for animation

await toolbar.waitUntilSketchingReady()

await expect(
page.getByRole('button', { name: 'line Line', exact: true })
).toBeVisible()

await u.closeDebugPanel()
await page.mouse.click(707, 393)

await expect(page.locator('.cm-content')).toContainText(codeLine1)
await expect(page.locator('.cm-content')).toContainText(codeLine2)

await page
.getByRole('button', { name: 'line Line', exact: true })
.first()
.click()
await u.openAndClearDebugPanel()
await page.getByRole('button', { name: 'Exit Sketch' }).click()
await u.expectCmdLog('[data-message-type="execution-done"]')

await u.clearCommandLogs()
await u.removeCurrentCode()
}

const planeConfigs = [
{
plane: 'XY',
coords: { x: 600, y: 388 },
description: 'red plane',
},
{
plane: 'YZ',
coords: { x: 700, y: 250 },
description: 'green plane',
},
{
plane: 'XZ',
coords: { x: 684, y: 427 },
description: 'blue plane',
},
{
plane: '-XY',
coords: { x: 600, y: 118 },
description: 'back of red plane',
},
{
plane: '-YZ',
coords: { x: 700, y: 219 },
description: 'back of green plane',
},
{
plane: '-XZ',
coords: { x: 700, y: 80 },
description: 'back of blue plane',
},
]

for (const config of planeConfigs) {
test(config.plane, async ({ page, homePage, scene, toolbar, cmdBar }) => {
await sketchOnPlaneAndBackSideTest(
page,
homePage,
scene,
toolbar,
cmdBar,
config.plane,
config.coords
)
@@ -525,7 +525,9 @@ test.describe('Command bar tests', () => {
const projectName = 'test'
const beforeKclCode = `a = 5
b = a * a
c = 3 + a`
c = 3 + a
theta = 45deg
`
await context.folderSetupFn(async (dir) => {
const testProject = join(dir, projectName)
await fsp.mkdir(testProject, { recursive: true })

@@ -615,9 +617,45 @@ c = 3 + a`
stage: 'commandBarClosed',
})
})
await test.step(`Edit a parameter with explicit units via command bar`, async () => {
await cmdBar.cmdBarOpenBtn.click()
await cmdBar.chooseCommand('edit parameter')
await cmdBar
.selectOption({
name: 'theta',
})
.click()
await cmdBar.expectState({
stage: 'arguments',
commandName: 'Edit parameter',
currentArgKey: 'value',
currentArgValue: '45deg',
headerArguments: {
Name: 'theta',
Value: '',
},
highlightedHeaderArg: 'value',
})
await cmdBar.argumentInput
.locator('[contenteditable]')
.fill('45deg + 1deg')
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
commandName: 'Edit parameter',
headerArguments: {
Name: 'theta',
Value: '46deg',
},
})
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'commandBarClosed',
})
})

await editor.expectEditor.toContain(
`a = 5b = a * amyParameter001 = ${newValue}c = 3 + a`
`a = 5b = a * amyParameter001 = ${newValue}c = 3 + atheta = 45deg + 1deg`
)
})
@@ -274,6 +274,13 @@ export class ToolbarFixture {
.nth(operationIndex)
}

getDefaultPlaneVisibilityButton(plane: 'XY' | 'XZ' | 'YZ' = 'XY') {
const index = plane === 'XZ' ? 0 : plane === 'XY' ? 1 : 2
return this.featureTreePane
.getByTestId('feature-tree-visibility-toggle')
.nth(index)
}

/**
* View source on a specific operation in the Feature Tree pane.
* @param operationName The name of the operation type
@@ -136,17 +136,17 @@ test.describe('Point-and-click tests', () => {
highlightedHeaderArg: 'length',
commandName: 'Extrude',
})
await page.keyboard.insertText('width - 0.001')
await page.keyboard.insertText('width - 0.001in')
await cmdBar.progressCmdBar()
await cmdBar.expectState({
stage: 'review',
headerArguments: {
Length: '4.999',
Length: '4.999in',
},
commandName: 'Extrude',
})
await cmdBar.progressCmdBar()
await editor.expectEditor.toContain('extrude(length = width - 0.001)')
await editor.expectEditor.toContain('extrude(length = width - 0.001in)')
})

await test.step(`Edit second extrude via feature tree`, async () => {
flake.lock (generated): 18 lines changed

@@ -20,11 +20,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1745998881,
"narHash": "sha256-vonyYAKJSlsX4n9GCsS0pHxR6yCrfqBIuGvANlkwG6U=",
"lastModified": 1750865895,
"narHash": "sha256-p2dWAQcLVzquy9LxYCZPwyUdugw78Qv3ChvnX755qHA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "423d2df5b04b4ee7688c3d71396e872afa236a89",
"rev": "61c0f513911459945e2cb8bf333dc849f1b976ff",
"type": "github"
},
"original": {

@@ -36,11 +36,11 @@
},
"nixpkgs_2": {
"locked": {
"lastModified": 1745998881,
"narHash": "sha256-vonyYAKJSlsX4n9GCsS0pHxR6yCrfqBIuGvANlkwG6U=",
"lastModified": 1750865895,
"narHash": "sha256-p2dWAQcLVzquy9LxYCZPwyUdugw78Qv3ChvnX755qHA=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "423d2df5b04b4ee7688c3d71396e872afa236a89",
"rev": "61c0f513911459945e2cb8bf333dc849f1b976ff",
"type": "github"
},
"original": {

@@ -78,11 +78,11 @@
"nixpkgs": "nixpkgs_3"
},
"locked": {
"lastModified": 1745980514,
"narHash": "sha256-CITAeiuXGjDvT5iZBXr6vKVWQwsUQLJUMFO91bfJFC4=",
"lastModified": 1750964660,
"narHash": "sha256-YQ6EyFetjH1uy5JhdhRdPe6cuNXlYpMAQePFfZj4W7M=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "7fbdae44b0f40ea432e46fd152ad8be0f8f41ad6",
"rev": "04f0fcfb1a50c63529805a798b4b5c21610ff390",
"type": "github"
},
"original": {
@@ -125,8 +125,7 @@ test('Shows a loading spinner when uninitialized credit count', async () => {
await expect(queryByTestId('spinner')).toBeVisible()
})

test('Shows the total credits for Unknown subscription', async () => {
const data = {
const unKnownTierData = {
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 25,

@@ -135,8 +134,48 @@ test('Shows the total credits for Unknown subscription', async () => {
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "unknown",
}
}
}

const freeTierData = {
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "free",
}
}

const proTierData = {
// These are all ignored
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "pro",
}
}

const enterpriseTierData = {
// These are all ignored, user is part of an org.
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
// This should be ignored because the user is part of an Org.
name: "free",
}
}

test('Shows the total credits for Unknown subscription', async () => {
const data = unKnownTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))

@@ -166,17 +205,7 @@ test('Shows the total credits for Unknown subscription', async () => {
})

test('Progress bar reflects ratio left of Free subscription', async () => {
const data = {
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "free",
}
}

const data = freeTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))

@@ -212,19 +241,7 @@ test('Progress bar reflects ratio left of Free subscription', async () => {
})
})
test('Shows infinite credits for Pro subscription', async () => {
const data = {
// These are all ignored
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
name: "pro",
}
}

const data = proTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))

@@ -255,19 +272,7 @@ test('Shows infinite credits for Pro subscription', async () => {
await expect(queryByTestId('billing-remaining-progress-bar-inline')).toBe(null)
})
test('Shows infinite credits for Enterprise subscription', async () => {
const data = {
// These are all ignored, user is part of an org.
balance: {
monthlyApiCreditsRemaining: 10,
stableApiCreditsRemaining: 0,
},
subscriptions: {
// This should be ignored because it's Pro tier.
monthlyPayAsYouGoApiCreditsTotal: 20,
// This should be ignored because the user is part of an Org.
name: "free",
}
}
const data = enterpriseTierData

server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {

@@ -297,3 +302,58 @@ test('Shows infinite credits for Enterprise subscription', async () => {
await expect(queryByTestId('infinity')).toBeVisible()
await expect(queryByTestId('billing-remaining-progress-bar-inline')).toBe(null)
})

test('Show upgrade button if credits are not infinite', async () => {
const data = freeTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
}),
http.get('*/user/payment/subscriptions', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentSubscriptionsResponse(data.subscriptions))
}),
http.get('*/org', (req, res, ctx) => {
return new HttpResponse(403)
}),
)

const billingActor = createActor(billingMachine, { input: BILLING_CONTEXT_DEFAULTS }).start()

const { queryByTestId } = render(<BillingDialog
billingActor={billingActor}
/>)

await act(() => {
billingActor.send({ type: BillingTransition.Update, apiToken: "it doesn't matter wtf this is :)" })
})

await expect(queryByTestId('billing-upgrade-button')).toBeVisible()
})

test('Hide upgrade button if credits are infinite', async () => {
const data = enterpriseTierData
server.use(
http.get('*/user/payment/balance', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentBalanceResponse(data.balance))
}),
http.get('*/user/payment/subscriptions', (req, res, ctx) => {
return HttpResponse.json(createUserPaymentSubscriptionsResponse(data.subscriptions))
}),
// Ok finally the first use of an org lol
http.get('*/org', (req, res, ctx) => {
return HttpResponse.json(createOrgResponse())
}),
)

const billingActor = createActor(billingMachine, { input: BILLING_CONTEXT_DEFAULTS }).start()

const { queryByTestId } = render(<BillingDialog
billingActor={billingActor}
/>)

await act(() => {
billingActor.send({ type: BillingTransition.Update, apiToken: "it doesn't matter wtf this is :)" })
})

await expect(queryByTestId('billing-upgrade-button')).toBe(null)
})
package-lock.json (generated): 1118 lines changed. File diff suppressed because it is too large.

Binary file not shown (before: 84 KiB, after: 84 KiB).
rust/Cargo.lock (generated): 124 lines changed

Package version bumps:

[[package]]
name = "kittycad-modeling-cmds"
version = "0.2.124"
version = "0.2.125"
checksum = "221aa4670a7ad7dc8f1e4e0f9990bf3cff0a64417eb76493bafe5bbbc1f8350a"
checksum = "cfd09d95f8bbeb090d4d1137c9bf421eb75763f7a30e4a9e8eefa249ddf20bd3"

[[package]]
name = "serde_spanned"
version = "0.6.8"
version = "0.6.9"
checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"

[[package]]
name = "syn"
version = "2.0.103"
version = "2.0.104"
checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"

[[package]]
name = "toml"
version = "0.8.22"
version = "0.8.23"
checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae"
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"

[[package]]
name = "toml_edit"
version = "0.22.26"
version = "0.22.27"
checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"

The remaining hunks in this generated file repeat the same change inside the dependency lists of every crate that builds against syn:
"syn 2.0.103",
"syn 2.0.104",
@@ -36,7 +36,7 @@ dashmap = { version = "6.1.0" }
http = "1"
indexmap = "2.9.0"
kittycad = { version = "0.3.37", default-features = false, features = ["js", "requests"] }
kittycad-modeling-cmds = { version = "0.2.124", features = ["ts-rs", "websocket"] }
kittycad-modeling-cmds = { version = "0.2.125", features = ["ts-rs", "websocket"] }
lazy_static = "1.5.0"
miette = "7.6.0"
pyo3 = { version = "0.24.2" }

@@ -60,6 +60,6 @@ lossy_float_literal = "warn"
result_large_err = "allow"

# Example: how to point modeling-app at a different repo (e.g. a branch or a local clone)
#[patch.crates-io]
#kittycad-modeling-cmds = { path = "../../../modeling-api/modeling-cmds" }
#kittycad-modeling-session = { path = "../../../modeling-api/modeling-session" }
# [patch.crates-io]
# kittycad-modeling-cmds = { path = "../../modeling-api/modeling-cmds/" }
# kittycad-modeling-session = { path = "../../modeling-api/modeling-session" }
@@ -19,7 +19,7 @@ anyhow = { workspace = true }
clap = { workspace = true, features = ["derive"] }
semver = "1.0.25"
serde = { workspace = true }
toml_edit = "0.22.26"
toml_edit = "0.22.27"

[lints]
workspace = true
@@ -14,7 +14,7 @@ bench = false
[dependencies]
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.103", features = ["full"] }
syn = { version = "2.0.104", features = ["full"] }

[lints]
workspace = true
@@ -14,7 +14,7 @@ bench = false
convert_case = "0.8.0"
proc-macro2 = "1"
quote = "1"
syn = { version = "2.0.103", features = ["full"] }
syn = { version = "2.0.104", features = ["full"] }

[lints]
workspace = true
@@ -42,7 +42,7 @@ impl Build {
.to_string();

if !stable {
version = format!("{}-nightly", version);
version = format!("{version}-nightly");
}

let release_tag = if stable {

@@ -59,10 +59,7 @@ impl Build {
if stable && !release_tag.contains(&version) {
// bail early if the tag doesn't match the version
// TODO: error here when we use the tags with kcl
println!(
"Tag {} doesn't match version {}. Did you forget to update Cargo.toml?",
release_tag, version
);
println!("Tag {release_tag} doesn't match version {version}. Did you forget to update Cargo.toml?");
}

build_server(sh, &version, &target)?;
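Most of the Rust changes in this comparison are the same mechanical modernization shown above: positional format! arguments are replaced with inline captured identifiers. A minimal sketch of the two equivalent forms (the variable name here is illustrative, not taken from the repo):

fn main() {
    let version = "1.2.3";
    // Positional argument, the style being removed in these hunks.
    let old_style = format!("{}-nightly", version);
    // Inline captured identifier, the style being added.
    let new_style = format!("{version}-nightly");
    // Both produce "1.2.3-nightly".
    assert_eq!(old_style, new_style);
}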
@@ -95,10 +95,10 @@ async fn main() -> Result<()> {
// Format fields using the provided closure.
// We want to make this very concise otherwise the logs are not able to be read by humans.
let format = tracing_subscriber::fmt::format::debug_fn(|writer, field, value| {
if format!("{}", field) == "message" {
write!(writer, "{}: {:?}", field, value)
if format!("{field}") == "message" {
write!(writer, "{field}: {value:?}")
} else {
write!(writer, "{}", field)
write!(writer, "{field}")
}
})
// Separate each field with a comma.
@@ -123,7 +123,7 @@
"@vscode/test-electron": "^2.4.1",
"@vscode/vsce": "^3.3.2",
"cross-env": "^7.0.3",
"esbuild": "^0.25.2",
"esbuild": "^0.25.3",
"glob": "^11.0.1",
"mocha": "^11.1.0",
"typescript": "^5.8.3"
@@ -87,10 +87,10 @@ async fn main() -> Result<()> {
// Format fields using the provided closure.
// We want to make this very concise otherwise the logs are not able to be read by humans.
let format = tracing_subscriber::fmt::format::debug_fn(|writer, field, value| {
if format!("{}", field) == "message" {
write!(writer, "{}: {:?}", field, value)
if format!("{field}") == "message" {
write!(writer, "{field}: {value:?}")
} else {
write!(writer, "{}", field)
write!(writer, "{field}")
}
})
// Separate each field with a comma.

@@ -151,7 +151,7 @@ async fn run_cmd(opts: &Opts) -> Result<()> {

tokio::spawn(async move {
if let Some(sig) = signals.forever().next() {
log::info!("received signal: {:?}", sig);
log::info!("received signal: {sig:?}");
log::info!("triggering cleanup...");

// Exit the process.
@@ -2,10 +2,10 @@
name = "kcl-lib"
description = "KittyCAD Language implementation and tools"
version = "0.2.83"
edition = "2021"
edition = "2024"
license = "MIT"
repository = "https://github.com/KittyCAD/modeling-app"
rust-version = "1.83"
rust-version = "1.88"
authors = ["Jess Frazelle", "Adam Chalmers", "KittyCAD, Inc"]
keywords = ["kcl", "KittyCAD", "CAD"]
exclude = ["tests/*", "benches/*", "examples/*", "e2e/*", "bindings/*", "fuzz/*"]

@@ -74,7 +74,7 @@ sha2 = "0.10.9"
tabled = { version = "0.20.0", optional = true }
tempfile = "3.20"
thiserror = "2.0.0"
toml = "0.8.22"
toml = "0.8.23"
ts-rs = { version = "11.0.1", features = [
"uuid-impl",
"url-impl",
@@ -4,7 +4,7 @@ use std::{
path::{Path, PathBuf},
};

use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};

const IGNORE_DIRS: [&str; 2] = ["step", "screenshots"];

@@ -61,7 +61,7 @@ fn run_benchmarks(c: &mut Criterion) {

// Read the file content (panic on failure)
let input_content = fs::read_to_string(&input_file)
.unwrap_or_else(|e| panic!("Failed to read main.kcl in directory {}: {}", dir_name, e));
.unwrap_or_else(|e| panic!("Failed to read main.kcl in directory {dir_name}: {e}"));

// Create a benchmark group for this directory
let mut group = c.benchmark_group(&dir_name);

@@ -72,12 +72,12 @@ fn run_benchmarks(c: &mut Criterion) {
#[cfg(feature = "benchmark-execution")]
let program = kcl_lib::Program::parse_no_errs(&input_content).unwrap();

group.bench_function(format!("parse_{}", dir_name), |b| {
group.bench_function(format!("parse_{dir_name}"), |b| {
b.iter(|| kcl_lib::Program::parse_no_errs(black_box(&input_content)).unwrap())
});

#[cfg(feature = "benchmark-execution")]
group.bench_function(format!("execute_{}", dir_name), |b| {
group.bench_function(format!("execute_{dir_name}"), |b| {
b.iter(|| {
if let Err(err) = rt.block_on(async {
let ctx = kcl_lib::ExecutorContext::new_with_default_client().await?;

@@ -86,7 +86,7 @@ fn run_benchmarks(c: &mut Criterion) {
ctx.close().await;
Ok::<(), anyhow::Error>(())
}) {
panic!("Failed to execute program: {}", err);
panic!("Failed to execute program: {err}");
}
})
});
@@ -1,6 +1,6 @@
use std::hint::black_box;

use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};

pub fn bench_parse(c: &mut Criterion) {
for (name, file) in [
@@ -1,4 +1,4 @@
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{Criterion, criterion_group, criterion_main};

pub fn bench_digest(c: &mut Criterion) {
for (name, file) in [
@@ -1,6 +1,6 @@
use std::hint::black_box;

use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use criterion::{BenchmarkId, Criterion, criterion_group, criterion_main};
use kcl_lib::kcl_lsp_server;
use tokio::runtime::Runtime;
use tower_lsp::LanguageServer;
@@ -1,9 +1,9 @@
//! Cache testing framework.

use kcl_lib::{bust_cache, ExecError, ExecOutcome};
use kcl_lib::{ExecError, ExecOutcome, bust_cache};
#[cfg(feature = "artifact-graph")]
use kcl_lib::{exec::Operation, NodePathStep};
use kcmc::{each_cmd as mcmd, ModelingCmd};
use kcl_lib::{NodePathStep, exec::Operation};
use kcmc::{ModelingCmd, each_cmd as mcmd};
use kittycad_modeling_cmds as kcmc;
use pretty_assertions::assert_eq;

@@ -38,7 +38,7 @@ async fn cache_test(
if !variation.other_files.is_empty() {
let tmp_dir = std::env::temp_dir();
let tmp_dir = tmp_dir
.join(format!("kcl_test_{}", test_name))
.join(format!("kcl_test_{test_name}"))
.join(uuid::Uuid::new_v4().to_string());

// Create a temporary file for each of the other files.

@@ -56,7 +56,7 @@ async fn cache_test(
Err(error) => {
let report = error.clone().into_miette_report_with_outputs(variation.code).unwrap();
let report = miette::Report::new(report);
panic!("{:?}", report);
panic!("{report:?}");
}
};

@@ -69,7 +69,7 @@ async fn cache_test(
.and_then(|x| x.decode().map_err(|e| ExecError::BadPng(e.to_string())))
.unwrap();
// Save the snapshot.
let path = crate::assert_out(&format!("cache_{}_{}", test_name, index), &img);
let path = crate::assert_out(&format!("cache_{test_name}_{index}"), &img);

img_results.push((path, img, outcome));
}

@@ -337,8 +337,7 @@ extrude001 = extrude(profile001, length = 4)
// 0] as a more lenient check.
.map(|c| !c.range.is_synthetic() && c.node_path.is_empty())
.unwrap_or(false),
"artifact={:?}",
artifact
"artifact={artifact:?}"
);
}
}
@@ -1,8 +1,8 @@
mod cache;

use kcl_lib::{
test_server::{execute_and_export_step, execute_and_snapshot, execute_and_snapshot_no_auth},
BacktraceItem, ExecError, ModuleId, SourceRange,
test_server::{execute_and_export_step, execute_and_snapshot, execute_and_snapshot_no_auth},
};

/// The minimum permissible difference between asserted twenty-twenty images.

@@ -869,11 +869,13 @@ async fn kcl_test_revolve_bad_angle_low() {
let result = execute_and_snapshot(code, None).await;

assert!(result.is_err());
assert!(result
assert!(
result
.err()
.unwrap()
.to_string()
.contains("Expected angle to be between -360 and 360 and not 0, found `-455`"));
.contains("Expected angle to be between -360 and 360 and not 0, found `-455`")
);
}

#[tokio::test(flavor = "multi_thread")]

@@ -895,11 +897,13 @@ async fn kcl_test_revolve_bad_angle_high() {
let result = execute_and_snapshot(code, None).await;

assert!(result.is_err());
assert!(result
assert!(
result
.err()
.unwrap()
.to_string()
.contains("Expected angle to be between -360 and 360 and not 0, found `455`"));
.contains("Expected angle to be between -360 and 360 and not 0, found `455`")
);
}

#[tokio::test(flavor = "multi_thread")]

@@ -2090,7 +2094,10 @@ async fn kcl_test_better_type_names() {
},
None => todo!(),
};
assert_eq!(err, "This function expected the input argument to be one or more Solids or ImportedGeometry but it's actually of type Sketch. You can convert a sketch (2D) into a Solid (3D) by calling a function like `extrude` or `revolve`");
assert_eq!(
err,
"This function expected the input argument to be one or more Solids or ImportedGeometry but it's actually of type Sketch. You can convert a sketch (2D) into a Solid (3D) by calling a function like `extrude` or `revolve`"
);
}

#[tokio::test(flavor = "multi_thread")]
#[tokio::test(flavor = "multi_thread")]
|
||||
|
@ -101,7 +101,7 @@ pub trait CoreDump: Clone {
|
||||
.meta()
|
||||
.create_debug_uploads(vec![kittycad::types::multipart::Attachment {
|
||||
name: "".to_string(),
|
||||
filepath: Some(format!(r#"modeling-app/coredump-{}.json"#, coredump_id).into()),
|
||||
filepath: Some(format!(r#"modeling-app/coredump-{coredump_id}.json"#).into()),
|
||||
content_type: Some("application/json".to_string()),
|
||||
data,
|
||||
}])
|
||||
|
@@ -189,7 +189,7 @@ fn generate_example(index: usize, src: &str, props: &ExampleProperties, file_nam
index
);
let image_data =
std::fs::read(&image_path).unwrap_or_else(|_| panic!("Failed to read image file: {}", image_path));
std::fs::read(&image_path).unwrap_or_else(|_| panic!("Failed to read image file: {image_path}"));
base64::engine::general_purpose::STANDARD.encode(&image_data)
};

@@ -225,7 +225,7 @@ fn generate_type_from_kcl(ty: &TyData, file_name: String, example_name: String,

let output = hbs.render("kclType", &data)?;
let output = cleanup_types(&output, kcl_std);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), &output);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), &output);

Ok(())
}

@@ -267,7 +267,7 @@ fn generate_mod_from_kcl(m: &ModData, file_name: String) -> Result<()> {
});

let output = hbs.render("module", &data)?;
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), &output);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), &output);

Ok(())
}

@@ -334,7 +334,7 @@ fn generate_function_from_kcl(

let output = hbs.render("function", &data)?;
let output = &cleanup_types(&output, kcl_std);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), output);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), output);

Ok(())
}

@@ -378,7 +378,7 @@ fn generate_const_from_kcl(cnst: &ConstData, file_name: String, example_name: St

let output = hbs.render("const", &data)?;
let output = cleanup_types(&output, kcl_std);
expectorate::assert_contents(format!("../../docs/kcl-std/{}.md", file_name), &output);
expectorate::assert_contents(format!("../../docs/kcl-std/{file_name}.md"), &output);

Ok(())
}
@@ -8,6 +8,7 @@ use tower_lsp::lsp_types::{
};

use crate::{
ModuleId,
execution::annotations,
parsing::{
ast::types::{

@@ -15,7 +16,6 @@ use crate::{
},
token::NumericSuffix,
},
ModuleId,
};

pub fn walk_prelude() -> ModData {

@@ -97,7 +97,7 @@ fn visit_module(name: &str, preferred_prefix: &str, names: WalkForNames) -> Resu
ImportSelector::None { .. } => {
let name = import.module_name().unwrap();
if names.contains(&name) {
Some(visit_module(&path[1], &format!("{}::", name), WalkForNames::All)?)
Some(visit_module(&path[1], &format!("{name}::"), WalkForNames::All)?)
} else {
None
}

@@ -451,7 +451,7 @@ impl ModData {
let (name, qual_name, module_name) = if name == "prelude" {
("std", "std".to_owned(), String::new())
} else {
(name, format!("std::{}", name), "std".to_owned())
(name, format!("std::{name}"), "std".to_owned())
};
Self {
preferred_name: format!("{preferred_prefix}{name}"),

@@ -767,14 +767,12 @@ impl ArgData {
for s in &arr.elements {
let Expr::Literal(lit) = s else {
panic!(
"Invalid value in `snippetArray`, all items must be string literals but found {:?}",
s
"Invalid value in `snippetArray`, all items must be string literals but found {s:?}"
);
};
let LiteralValue::String(litstr) = &lit.inner.value else {
panic!(
"Invalid value in `snippetArray`, all items must be string literals but found {:?}",
s
"Invalid value in `snippetArray`, all items must be string literals but found {s:?}"
);
};
items.push(litstr.to_owned());

@@ -816,7 +814,7 @@ impl ArgData {
}
match self.ty.as_deref() {
Some("Sketch") if self.kind == ArgKind::Special => None,
Some(s) if s.starts_with("number") => Some((index, format!(r#"{label}${{{}:10}}"#, index))),
Some(s) if s.starts_with("number") => Some((index, format!(r#"{label}${{{index}:10}}"#))),
Some("Point2d") => Some((index + 1, format!(r#"{label}[${{{}:0}}, ${{{}:0}}]"#, index, index + 1))),
Some("Point3d") => Some((
index + 2,

@@ -831,7 +829,7 @@ impl ArgData {
Some("Sketch") | Some("Sketch | Helix") => Some((index, format!(r#"{label}${{{index}:sketch000}}"#))),
Some("Edge") => Some((index, format!(r#"{label}${{{index}:tag_or_edge_fn}}"#))),
Some("[Edge; 1+]") => Some((index, format!(r#"{label}[${{{index}:tag_or_edge_fn}}]"#))),
Some("Plane") | Some("Solid | Plane") => Some((index, format!(r#"{label}${{{}:XY}}"#, index))),
Some("Plane") | Some("Solid | Plane") => Some((index, format!(r#"{label}${{{index}:XY}}"#))),
Some("[TaggedFace; 2]") => Some((
index + 1,
format!(r#"{label}[${{{}:tag}}, ${{{}:tag}}]"#, index, index + 1),

@@ -841,10 +839,10 @@ impl ArgData {
if self.name == "color" {
Some((index, format!(r"{label}${{{}:{}}}", index, "\"#ff0000\"")))
} else {
Some((index, format!(r#"{label}${{{}:"string"}}"#, index)))
Some((index, format!(r#"{label}${{{index}:"string"}}"#)))
}
}
Some("bool") => Some((index, format!(r#"{label}${{{}:false}}"#, index))),
Some("bool") => Some((index, format!(r#"{label}${{{index}:false}}"#))),
_ => None,
}
}

@@ -1298,7 +1296,10 @@ mod test {
continue;
}
let name = format!("{}-{i}", f.qual_name.replace("::", "-"));
assert!(TEST_NAMES.contains(&&*name), "Missing test for example \"{name}\", maybe need to update kcl-derive-docs/src/example_tests.rs?")
assert!(
TEST_NAMES.contains(&&*name),
"Missing test for example \"{name}\", maybe need to update kcl-derive-docs/src/example_tests.rs?"
)
}
}
}

@@ -1334,7 +1335,9 @@ mod test {
};

let Some(DocData::Fn(d)) = data.children.get(&format!("I:{qualname}")) else {
panic!("Could not find data for {NAME} (missing a child entry for {qualname}), maybe need to update kcl-derive-docs/src/example_tests.rs?");
panic!(
"Could not find data for {NAME} (missing a child entry for {qualname}), maybe need to update kcl-derive-docs/src/example_tests.rs?"
);
};

for (i, eg) in d.examples.iter().enumerate() {

@@ -1362,6 +1365,8 @@ mod test {
return;
}

panic!("Could not find data for {NAME} (no example {number}), maybe need to update kcl-derive-docs/src/example_tests.rs?");
panic!(
"Could not find data for {NAME} (no example {number}), maybe need to update kcl-derive-docs/src/example_tests.rs?"
);
}
}
@@ -2,11 +2,11 @@
//! tasks.

use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc,
atomic::{AtomicUsize, Ordering},
};

use tokio::sync::{mpsc, Notify};
use tokio::sync::{Notify, mpsc};

use crate::errors::KclError;
@@ -3,26 +3,26 @@

use std::{collections::HashMap, sync::Arc};

use anyhow::{anyhow, Result};
use anyhow::{Result, anyhow};
use futures::{SinkExt, StreamExt};
use indexmap::IndexMap;
use kcmc::{
ModelingCmd,
websocket::{
BatchResponse, FailureWebSocketResponse, ModelingCmdReq, ModelingSessionData, OkWebSocketResponseData,
SuccessWebSocketResponse, WebSocketRequest, WebSocketResponse,
},
ModelingCmd,
};
use kittycad_modeling_cmds::{self as kcmc};
use tokio::sync::{mpsc, oneshot, RwLock};
use tokio::sync::{RwLock, mpsc, oneshot};
use tokio_tungstenite::tungstenite::Message as WsMsg;
use uuid::Uuid;

use crate::{
SourceRange,
engine::{AsyncTasks, EngineManager, EngineStats},
errors::{KclError, KclErrorDetails},
execution::{DefaultPlanes, IdGenerator},
SourceRange,
};

#[derive(Debug, PartialEq)]

@@ -85,7 +85,7 @@ impl TcpRead {
let msg = match msg {
Ok(msg) => msg,
Err(e) if matches!(e, tokio_tungstenite::tungstenite::Error::Protocol(_)) => {
return Err(WebSocketReadError::Read(e))
return Err(WebSocketReadError::Read(e));
}
Err(e) => return Err(anyhow::anyhow!("Error reading from engine's WebSocket: {e}").into()),
};

@@ -427,7 +427,7 @@ impl EngineManager for EngineConnection {
request_sent: tx,
})
.await
.map_err(|e| KclError::new_engine(KclErrorDetails::new(format!("Failed to send debug: {}", e), vec![])))?;
.map_err(|e| KclError::new_engine(KclErrorDetails::new(format!("Failed to send debug: {e}"), vec![])))?;

let _ = rx.await;
Ok(())

@@ -463,7 +463,7 @@ impl EngineManager for EngineConnection {
.await
.map_err(|e| {
KclError::new_engine(KclErrorDetails::new(
format!("Failed to send modeling command: {}", e),
format!("Failed to send modeling command: {e}"),
vec![source_range],
))
})?;

@@ -533,7 +533,7 @@ impl EngineManager for EngineConnection {
}

Err(KclError::new_engine(KclErrorDetails::new(
format!("Modeling command timed out `{}`", id),
format!("Modeling command timed out `{id}`"),
vec![source_range],
)))
}
@ -12,16 +12,16 @@ use kcmc::{
|
||||
WebSocketResponse,
|
||||
},
|
||||
};
|
||||
use kittycad_modeling_cmds::{self as kcmc, websocket::ModelingCmdReq, ImportFiles, ModelingCmd};
|
||||
use kittycad_modeling_cmds::{self as kcmc, ImportFiles, ModelingCmd, websocket::ModelingCmdReq};
|
||||
use tokio::sync::RwLock;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
engine::{AsyncTasks, EngineStats},
|
||||
errors::KclError,
|
||||
exec::DefaultPlanes,
|
||||
execution::IdGenerator,
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -11,10 +11,10 @@ use uuid::Uuid;
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
engine::{AsyncTasks, EngineStats},
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{DefaultPlanes, IdGenerator},
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
#[wasm_bindgen(module = "/../../src/lang/std/engineConnection.ts")]
|
||||
|
@ -12,15 +12,15 @@ pub mod conn_wasm;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
Arc,
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
},
|
||||
};
|
||||
|
||||
pub use async_tasks::AsyncTasks;
|
||||
use indexmap::IndexMap;
|
||||
use kcmc::{
|
||||
each_cmd as mcmd,
|
||||
ModelingCmd, each_cmd as mcmd,
|
||||
length_unit::LengthUnit,
|
||||
ok_response::OkModelingCmdResponse,
|
||||
shared::Color,
|
||||
@ -28,7 +28,6 @@ use kcmc::{
|
||||
BatchResponse, ModelingBatch, ModelingCmdReq, ModelingSessionData, OkWebSocketResponseData, WebSocketRequest,
|
||||
WebSocketResponse,
|
||||
},
|
||||
ModelingCmd,
|
||||
};
|
||||
use kittycad_modeling_cmds as kcmc;
|
||||
use parse_display::{Display, FromStr};
|
||||
@ -39,9 +38,9 @@ use uuid::Uuid;
|
||||
use web_time::Instant;
|
||||
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{types::UnitLen, DefaultPlanes, IdGenerator, PlaneInfo, Point3d},
|
||||
SourceRange,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{DefaultPlanes, IdGenerator, PlaneInfo, Point3d, types::UnitLen},
|
||||
};
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
@ -291,7 +290,10 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
// the artifact graph won't care either if its gone since you can't select it
|
||||
// anymore anyways.
|
||||
if let Err(err) = self.async_tasks().join_all().await {
|
||||
crate::log::logln!("Error waiting for async tasks (this is typically fine and just means that an edge became something else): {:?}", err);
|
||||
crate::log::logln!(
|
||||
"Error waiting for async tasks (this is typically fine and just means that an edge became something else): {:?}",
|
||||
err
|
||||
);
|
||||
}
|
||||
|
||||
// Flush the batch to make sure nothing remains.
|
||||
@ -499,7 +501,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
}
|
||||
_ => {
|
||||
return Err(KclError::new_engine(KclErrorDetails::new(
|
||||
format!("The request is not a modeling command: {:?}", req),
|
||||
format!("The request is not a modeling command: {req:?}"),
|
||||
vec![*range],
|
||||
)));
|
||||
}
|
||||
@ -529,7 +531,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
} else {
|
||||
// We should never get here.
|
||||
Err(KclError::new_engine(KclErrorDetails::new(
|
||||
format!("Failed to get batch response: {:?}", response),
|
||||
format!("Failed to get batch response: {response:?}"),
|
||||
vec![source_range],
|
||||
)))
|
||||
}
|
||||
@ -544,7 +546,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
// an error.
|
||||
let source_range = id_to_source_range.get(cmd_id.as_ref()).cloned().ok_or_else(|| {
|
||||
KclError::new_engine(KclErrorDetails::new(
|
||||
format!("Failed to get source range for command ID: {:?}", cmd_id),
|
||||
format!("Failed to get source range for command ID: {cmd_id:?}"),
|
||||
vec![],
|
||||
))
|
||||
})?;
|
||||
@ -554,7 +556,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
self.parse_websocket_response(ws_resp, source_range)
|
||||
}
|
||||
_ => Err(KclError::new_engine(KclErrorDetails::new(
|
||||
format!("The final request is not a modeling command: {:?}", final_req),
|
||||
format!("The final request is not a modeling command: {final_req:?}"),
|
||||
vec![source_range],
|
||||
))),
|
||||
}
|
||||
@ -663,7 +665,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
let info = DEFAULT_PLANE_INFO.get(&name).ok_or_else(|| {
|
||||
// We should never get here.
|
||||
KclError::new_engine(KclErrorDetails::new(
|
||||
format!("Failed to get default plane info for: {:?}", name),
|
||||
format!("Failed to get default plane info for: {name:?}"),
|
||||
vec![source_range],
|
||||
))
|
||||
})?;
|
||||
@ -739,7 +741,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
// Get the source range for the command.
|
||||
let source_range = id_to_source_range.get(cmd_id).cloned().ok_or_else(|| {
|
||||
KclError::new_engine(KclErrorDetails::new(
|
||||
format!("Failed to get source range for command ID: {:?}", cmd_id),
|
||||
format!("Failed to get source range for command ID: {cmd_id:?}"),
|
||||
vec![],
|
||||
))
|
||||
})?;
|
||||
@ -754,7 +756,7 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
|
||||
// Return an error that we did not get an error or the response we wanted.
|
||||
// This should never happen but who knows.
|
||||
Err(KclError::new_engine(KclErrorDetails::new(
|
||||
format!("Failed to find response for command ID: {:?}", id),
|
||||
format!("Failed to find response for command ID: {id:?}"),
|
||||
vec![],
|
||||
)))
|
||||
}
|
||||
|
@ -7,11 +7,11 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
use crate::execution::{ArtifactCommand, ArtifactGraph, Operation};
|
||||
use crate::{
|
||||
ModuleId,
|
||||
execution::DefaultPlanes,
|
||||
lsp::IntoDiagnostic,
|
||||
modules::{ModulePath, ModuleSource},
|
||||
source_range::SourceRange,
|
||||
ModuleId,
|
||||
};
|
||||
|
||||
/// How did the KCL execution fail
|
||||
|
@ -2,13 +2,13 @@
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
use kittycad_modeling_cmds::coord::{System, KITTYCAD, OPENGL, VULKAN};
|
||||
use kittycad_modeling_cmds::coord::{KITTYCAD, OPENGL, System, VULKAN};
|
||||
|
||||
use crate::{
|
||||
KclError, SourceRange,
|
||||
errors::KclErrorDetails,
|
||||
execution::types::{UnitAngle, UnitLen},
|
||||
parsing::ast::types::{Annotation, Expr, LiteralValue, Node, ObjectProperty},
|
||||
KclError, SourceRange,
|
||||
};
|
||||
|
||||
/// Annotations which should cause re-execution if they change.
|
||||
|
@ -1,20 +1,19 @@
|
||||
use fnv::FnvHashMap;
|
||||
use indexmap::IndexMap;
|
||||
use kittycad_modeling_cmds::{
|
||||
self as kcmc,
|
||||
self as kcmc, EnableSketchMode, ModelingCmd,
|
||||
ok_response::OkModelingCmdResponse,
|
||||
shared::ExtrusionFaceCapType,
|
||||
websocket::{BatchResponse, OkWebSocketResponseData, WebSocketResponse},
|
||||
EnableSketchMode, ModelingCmd,
|
||||
};
|
||||
use serde::{ser::SerializeSeq, Serialize};
|
||||
use serde::{Serialize, ser::SerializeSeq};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
KclError, NodePath, SourceRange,
|
||||
errors::KclErrorDetails,
|
||||
execution::ArtifactId,
|
||||
parsing::ast::types::{Node, Program},
|
||||
KclError, NodePath, SourceRange,
|
||||
};
|
||||
|
||||
#[cfg(test)]
|
||||
@ -893,7 +892,10 @@ fn artifacts_to_update(
|
||||
),
|
||||
};
|
||||
if original_path_ids.len() != face_edge_infos.len() {
|
||||
internal_error!(range, "EntityMirror or EntityMirrorAcrossEdge response has different number face edge info than original mirrored paths: id={id:?}, cmd={cmd:?}, response={response:?}");
|
||||
internal_error!(
|
||||
range,
|
||||
"EntityMirror or EntityMirrorAcrossEdge response has different number face edge info than original mirrored paths: id={id:?}, cmd={cmd:?}, response={response:?}"
|
||||
);
|
||||
}
|
||||
let mut return_arr = Vec::new();
|
||||
for (face_edge_info, original_path_id) in face_edge_infos.iter().zip(original_path_ids) {
|
||||
@ -909,7 +911,10 @@ fn artifacts_to_update(
|
||||
// of its info.
|
||||
let Some(Artifact::Path(original_path)) = artifacts.get(&original_path_id) else {
|
||||
// We couldn't find the original path. This is a bug.
|
||||
internal_error!(range, "Couldn't find original path for mirror2d: original_path_id={original_path_id:?}, cmd={cmd:?}");
|
||||
internal_error!(
|
||||
range,
|
||||
"Couldn't find original path for mirror2d: original_path_id={original_path_id:?}, cmd={cmd:?}"
|
||||
);
|
||||
};
|
||||
Path {
|
||||
id: path_id,
|
||||
|
@ -268,7 +268,7 @@ impl ArtifactGraph {
|
||||
for (group_id, artifact_ids) in groups {
|
||||
let group_id = *stable_id_map.get(&group_id).unwrap();
|
||||
writeln!(output, "{prefix}subgraph path{group_id} [Path]")?;
|
||||
let indented = format!("{} ", prefix);
|
||||
let indented = format!("{prefix} ");
|
||||
for artifact_id in artifact_ids {
|
||||
let artifact = self.map.get(&artifact_id).unwrap();
|
||||
let id = *stable_id_map.get(&artifact_id).unwrap();
|
||||
@ -353,7 +353,7 @@ impl ArtifactGraph {
|
||||
node_path_display(output, prefix, None, &segment.code_ref)?;
|
||||
}
|
||||
Artifact::Solid2d(_solid2d) => {
|
||||
writeln!(output, "{prefix}{}[Solid2d]", id)?;
|
||||
writeln!(output, "{prefix}{id}[Solid2d]")?;
|
||||
}
|
||||
Artifact::StartSketchOnFace(StartSketchOnFace { code_ref, .. }) => {
|
||||
writeln!(
|
||||
@ -494,24 +494,24 @@ impl ArtifactGraph {
|
||||
match edge.flow {
|
||||
EdgeFlow::SourceToTarget => match edge.direction {
|
||||
EdgeDirection::Forward => {
|
||||
writeln!(output, "{prefix}{source_id} x{}--> {}", extra, target_id)?;
|
||||
writeln!(output, "{prefix}{source_id} x{extra}--> {target_id}")?;
|
||||
}
|
||||
EdgeDirection::Backward => {
|
||||
writeln!(output, "{prefix}{source_id} <{}--x {}", extra, target_id)?;
|
||||
writeln!(output, "{prefix}{source_id} <{extra}--x {target_id}")?;
|
||||
}
|
||||
EdgeDirection::Bidirectional => {
|
||||
writeln!(output, "{prefix}{source_id} {}--- {}", extra, target_id)?;
|
||||
writeln!(output, "{prefix}{source_id} {extra}--- {target_id}")?;
|
||||
}
|
||||
},
|
||||
EdgeFlow::TargetToSource => match edge.direction {
|
||||
EdgeDirection::Forward => {
|
||||
writeln!(output, "{prefix}{target_id} x{}--> {}", extra, source_id)?;
|
||||
writeln!(output, "{prefix}{target_id} x{extra}--> {source_id}")?;
|
||||
}
|
||||
EdgeDirection::Backward => {
|
||||
writeln!(output, "{prefix}{target_id} <{}--x {}", extra, source_id)?;
|
||||
writeln!(output, "{prefix}{target_id} <{extra}--x {source_id}")?;
|
||||
}
|
||||
EdgeDirection::Bidirectional => {
|
||||
writeln!(output, "{prefix}{target_id} {}--- {}", extra, source_id)?;
|
||||
writeln!(output, "{prefix}{target_id} {extra}--- {source_id}")?;
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@ -6,15 +6,14 @@ use itertools::{EitherOrBoth, Itertools};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::{
|
||||
ExecOutcome, ExecutorContext,
|
||||
execution::{
|
||||
annotations,
|
||||
EnvironmentRef, ExecutorSettings, annotations,
|
||||
memory::Stack,
|
||||
state::{self as exec_state, ModuleInfoMap},
|
||||
EnvironmentRef, ExecutorSettings,
|
||||
},
|
||||
parsing::ast::types::{Annotation, Node, Program},
|
||||
walk::Node as WalkNode,
|
||||
ExecOutcome, ExecutorContext,
|
||||
};
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
@ -337,7 +336,7 @@ mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
use crate::execution::{parse_execute, parse_execute_with_project_dir, ExecTestResults};
|
||||
use crate::execution::{ExecTestResults, parse_execute, parse_execute_with_project_dir};
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_get_changed_program_same_code() {
|
||||
@ -755,7 +754,7 @@ extrude(profile001, length = 100)"#
|
||||
.await;
|
||||
|
||||
let CacheResult::CheckImportsOnly { reapply_settings, .. } = result else {
|
||||
panic!("Expected CheckImportsOnly, got {:?}", result);
|
||||
panic!("Expected CheckImportsOnly, got {result:?}");
|
||||
};
|
||||
|
||||
assert_eq!(reapply_settings, false);
|
||||
@ -839,7 +838,7 @@ extrude(profile001, length = 100)
|
||||
.await;
|
||||
|
||||
let CacheResult::CheckImportsOnly { reapply_settings, .. } = result else {
|
||||
panic!("Expected CheckImportsOnly, got {:?}", result);
|
||||
panic!("Expected CheckImportsOnly, got {result:?}");
|
||||
};
|
||||
|
||||
assert_eq!(reapply_settings, false);
|
||||
|
@ -1,10 +1,10 @@
|
||||
use indexmap::IndexMap;
|
||||
use serde::Serialize;
|
||||
|
||||
use super::{types::NumericType, ArtifactId, KclValue};
|
||||
use super::{ArtifactId, KclValue, types::NumericType};
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
use crate::parsing::ast::types::{Node, Program};
|
||||
use crate::{parsing::ast::types::ItemVisibility, ModuleId, NodePath, SourceRange};
|
||||
use crate::{ModuleId, NodePath, SourceRange, parsing::ast::types::ItemVisibility};
|
||||
|
||||
/// A CAD modeling operation for display in the feature tree, AKA operations
|
||||
/// timeline.
|
||||
@ -57,7 +57,7 @@ impl Operation {
|
||||
/// If the variant is `StdLibCall`, set the `is_error` field.
|
||||
pub(crate) fn set_std_lib_call_is_error(&mut self, is_err: bool) {
|
||||
match self {
|
||||
Self::StdLibCall { ref mut is_error, .. } => *is_error = is_err,
|
||||
Self::StdLibCall { is_error, .. } => *is_error = is_err,
|
||||
Self::VariableDeclaration { .. } | Self::GroupBegin { .. } | Self::GroupEnd => {}
|
||||
}
|
||||
}
|
||||
|
@ -3,17 +3,17 @@ use std::collections::HashMap;
|
||||
use async_recursion::async_recursion;
|
||||
|
||||
use crate::{
|
||||
CompilationError, NodePath,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
annotations,
|
||||
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, ModelingCmdMeta, ModuleArtifactState,
|
||||
Operation, PlaneType, StatementKind, TagIdentifier, annotations,
|
||||
cad_op::OpKclValue,
|
||||
fn_call::Args,
|
||||
kcl_value::{FunctionSource, TypeDef},
|
||||
memory,
|
||||
state::ModuleState,
|
||||
types::{NumericType, PrimitiveType, RuntimeType},
|
||||
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, ModelingCmdMeta, ModuleArtifactState,
|
||||
Operation, PlaneType, StatementKind, TagIdentifier,
|
||||
},
|
||||
fmt,
|
||||
modules::{ModuleId, ModulePath, ModuleRepr},
|
||||
@ -28,7 +28,6 @@ use crate::{
|
||||
},
|
||||
source_range::SourceRange,
|
||||
std::args::TyF64,
|
||||
CompilationError, NodePath,
|
||||
};
|
||||
|
||||
impl<'a> StatementKind<'a> {
|
||||
@ -198,19 +197,23 @@ impl ExecutorContext {
|
||||
}
|
||||
|
||||
if ty.is_ok() && !module_exports.contains(&ty_name) {
|
||||
ty = Err(KclError::new_semantic(KclErrorDetails::new(format!(
|
||||
ty = Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
|
||||
import_item.name.name
|
||||
),
|
||||
vec![SourceRange::from(&import_item.name)],)));
|
||||
vec![SourceRange::from(&import_item.name)],
|
||||
)));
|
||||
}
|
||||
|
||||
if mod_value.is_ok() && !module_exports.contains(&mod_name) {
|
||||
mod_value = Err(KclError::new_semantic(KclErrorDetails::new(format!(
|
||||
mod_value = Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"Cannot import \"{}\" from module because it is not exported. Add \"export\" before the definition to export it.",
|
||||
import_item.name.name
|
||||
),
|
||||
vec![SourceRange::from(&import_item.name)],)));
|
||||
vec![SourceRange::from(&import_item.name)],
|
||||
)));
|
||||
}
|
||||
|
||||
if value.is_err() && ty.is_err() && mod_value.is_err() {
|
||||
@ -270,7 +273,7 @@ impl ExecutorContext {
|
||||
.get_from(name, env_ref, source_range, 0)
|
||||
.map_err(|_err| {
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("{} is not defined in module (but was exported?)", name),
|
||||
format!("{name} is not defined in module (but was exported?)"),
|
||||
vec![source_range],
|
||||
))
|
||||
})?
|
||||
@ -431,7 +434,7 @@ impl ExecutorContext {
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
"User-defined types are not yet supported.".to_owned(),
|
||||
vec![metadata.source_range],
|
||||
)))
|
||||
)));
|
||||
}
|
||||
},
|
||||
}
|
||||
@ -792,11 +795,12 @@ fn var_in_own_ref_err(e: KclError, being_declared: &Option<String>) -> KclError
|
||||
// TODO after June 26th: replace this with a let-chain,
|
||||
// which will be available in Rust 1.88
|
||||
// https://rust-lang.github.io/rfcs/2497-if-let-chains.html
|
||||
match (&being_declared, &name) {
|
||||
(Some(name0), Some(name1)) if name0 == name1 => {
|
||||
details.message = format!("You can't use `{name0}` because you're currently trying to define it. Use a different variable here instead.");
|
||||
}
|
||||
_ => {}
|
||||
if let (Some(name0), Some(name1)) = (&being_declared, &name)
|
||||
&& name0 == name1
|
||||
{
|
||||
details.message = format!(
|
||||
"You can't use `{name0}` because you're currently trying to define it. Use a different variable here instead."
|
||||
);
|
||||
}
|
||||
KclError::UndefinedValue { details, name }
|
||||
}
|
||||
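The hunk above swaps the two-arm match for a let-chain, which is what the removed TODO comment anticipated ("available in Rust 1.88"). A small sketch of the same shape, with made-up variable names and assuming a toolchain recent enough for let-chains:

fn describe(being_declared: &Option<String>, name: &Option<String>) -> String {
    let mut message = String::from("undefined value");
    // Both patterns must match and the guard must hold before the body runs.
    if let (Some(name0), Some(name1)) = (being_declared, name)
        && name0 == name1
    {
        message = format!("`{name0}` is used inside its own definition");
    }
    message
}

fn main() {
    let declared = Some("width".to_owned());
    assert_eq!(describe(&declared, &declared), "`width` is used inside its own definition");
}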
@ -1077,7 +1081,7 @@ impl Node<BinaryExpression> {
|
||||
(&left_value, &right_value)
|
||||
{
|
||||
return Ok(KclValue::String {
|
||||
value: format!("{}{}", left, right),
|
||||
value: format!("{left}{right}"),
|
||||
meta,
|
||||
});
|
||||
}
|
||||
@ -1237,7 +1241,9 @@ impl Node<BinaryExpression> {
|
||||
exec_state.clear_units_warnings(&sr);
|
||||
let mut err = CompilationError::err(
|
||||
sr,
|
||||
format!("{} numbers which have unknown or incompatible units.\nYou can probably fix this error by specifying the units using type ascription, e.g., `len: number(mm)` or `(a * b): number(deg)`.", verb),
|
||||
format!(
|
||||
"{verb} numbers which have unknown or incompatible units.\nYou can probably fix this error by specifying the units using type ascription, e.g., `len: number(mm)` or `(a * b): number(deg)`."
|
||||
),
|
||||
);
|
||||
err.tag = crate::errors::Tag::UnknownNumericUnits;
|
||||
exec_state.warn(err);
|
||||
@ -1417,7 +1423,7 @@ async fn inner_execute_pipe_body(
|
||||
for expression in body {
|
||||
if let Expr::TagDeclarator(_) = expression {
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("This cannot be in a PipeExpression: {:?}", expression),
|
||||
format!("This cannot be in a PipeExpression: {expression:?}"),
|
||||
vec![expression.into()],
|
||||
)));
|
||||
}
|
||||
@ -1699,9 +1705,15 @@ fn jvalue_to_prop(value: &KclValue, property_sr: Vec<SourceRange>, name: &str) -
|
||||
let make_err =
|
||||
|message: String| Err::<Property, _>(KclError::new_semantic(KclErrorDetails::new(message, property_sr)));
|
||||
match value {
|
||||
n @ KclValue::Number{value: num, ty, .. } => {
|
||||
if !matches!(ty, NumericType::Known(crate::exec::UnitType::Count) | NumericType::Default { .. } | NumericType::Any ) {
|
||||
return make_err(format!("arrays can only be indexed by non-dimensioned numbers, found {}", n.human_friendly_type()));
|
||||
n @ KclValue::Number { value: num, ty, .. } => {
|
||||
if !matches!(
|
||||
ty,
|
||||
NumericType::Known(crate::exec::UnitType::Count) | NumericType::Default { .. } | NumericType::Any
|
||||
) {
|
||||
return make_err(format!(
|
||||
"arrays can only be indexed by non-dimensioned numbers, found {}",
|
||||
n.human_friendly_type()
|
||||
));
|
||||
}
|
||||
let num = *num;
|
||||
if num < 0.0 {
|
||||
@ -1711,13 +1723,15 @@ fn jvalue_to_prop(value: &KclValue, property_sr: Vec<SourceRange>, name: &str) -
|
||||
if let Some(nearest_int) = nearest_int {
|
||||
Ok(Property::UInt(nearest_int))
|
||||
} else {
|
||||
make_err(format!("'{num}' is not an integer, so you can't index an array with it"))
|
||||
make_err(format!(
|
||||
"'{num}' is not an integer, so you can't index an array with it"
|
||||
))
|
||||
}
|
||||
}
|
||||
KclValue::String{value: x, meta:_} => Ok(Property::String(x.to_owned())),
|
||||
_ => {
|
||||
make_err(format!("{name} is not a valid property/index, you can only use a string to get the property of an object, or an int (>= 0) to get an item in an array"))
|
||||
}
|
||||
KclValue::String { value: x, meta: _ } => Ok(Property::String(x.to_owned())),
|
||||
_ => make_err(format!(
|
||||
"{name} is not a valid property/index, you can only use a string to get the property of an object, or an int (>= 0) to get an item in an array"
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -1745,9 +1759,9 @@ mod test {
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
exec::UnitType,
|
||||
execution::{parse_execute, ContextType},
|
||||
ExecutorSettings, UnitLen,
|
||||
exec::UnitType,
|
||||
execution::{ContextType, parse_execute},
|
||||
};
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
@ -1777,7 +1791,7 @@ arr1 = [42]: [number(cm)]
|
||||
.get_from("arr1", result.mem_env, SourceRange::default(), 0)
|
||||
.unwrap();
|
||||
if let KclValue::HomArray { value, ty } = arr1 {
|
||||
assert_eq!(value.len(), 1, "Expected Vec with specific length: found {:?}", value);
|
||||
assert_eq!(value.len(), 1, "Expected Vec with specific length: found {value:?}");
|
||||
assert_eq!(*ty, RuntimeType::known_length(UnitLen::Cm));
|
||||
// Compare, ignoring meta.
|
||||
if let KclValue::Number { value, ty, .. } = &value[0] {
|
||||
@ -1946,7 +1960,7 @@ d = b + c
|
||||
.await
|
||||
.map_err(|err| {
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Failed to create mock engine connection: {}", err),
|
||||
format!("Failed to create mock engine connection: {err}"),
|
||||
vec![SourceRange::default()],
|
||||
))
|
||||
})
|
||||
|
@ -2,19 +2,19 @@ use async_recursion::async_recursion;
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use crate::{
|
||||
CompilationError, NodePath,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, StatementKind, TagEngineInfo,
|
||||
TagIdentifier,
|
||||
cad_op::{Group, OpArg, OpKclValue, Operation},
|
||||
kcl_value::FunctionSource,
|
||||
memory,
|
||||
types::RuntimeType,
|
||||
BodyType, EnvironmentRef, ExecState, ExecutorContext, KclValue, Metadata, StatementKind, TagEngineInfo,
|
||||
TagIdentifier,
|
||||
},
|
||||
parsing::ast::types::{CallExpressionKw, DefaultParamVal, FunctionExpression, Node, Program, Type},
|
||||
source_range::SourceRange,
|
||||
std::StdFn,
|
||||
CompilationError, NodePath,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@ -269,7 +269,7 @@ impl Node<CallExpressionKw> {
|
||||
};
|
||||
KclError::new_undefined_value(
|
||||
KclErrorDetails::new(
|
||||
format!("Result of user-defined function {} is undefined", fn_name),
|
||||
format!("Result of user-defined function {fn_name} is undefined"),
|
||||
source_ranges,
|
||||
),
|
||||
None,
|
||||
@ -401,7 +401,7 @@ impl FunctionDefinition<'_> {
|
||||
impl FunctionBody<'_> {
|
||||
fn prep_mem(&self, exec_state: &mut ExecState) {
|
||||
match self {
|
||||
FunctionBody::Rust(_) => exec_state.mut_stack().push_new_env_for_rust_call(),
|
||||
FunctionBody::Rust(_) => exec_state.mut_stack().push_new_root_env(true),
|
||||
FunctionBody::Kcl(_, memory) => exec_state.mut_stack().push_new_env_for_call(*memory),
|
||||
}
|
||||
}
|
||||
@ -445,7 +445,7 @@ fn update_memory_for_tags_of_geometry(result: &mut KclValue, exec_state: &mut Ex
|
||||
}
|
||||
}
|
||||
}
|
||||
KclValue::Solid { ref mut value } => {
|
||||
KclValue::Solid { value } => {
|
||||
for v in &value.value {
|
||||
if let Some(tag) = v.get_tag() {
|
||||
// Get the past tag and update it.
|
||||
@ -555,9 +555,9 @@ fn type_err_str(expected: &Type, found: &KclValue, source_range: &SourceRange, e
|
||||
let found_human = found.human_friendly_type();
|
||||
let found_ty = found.principal_type_string();
|
||||
let found_str = if found_human == found_ty || found_human == format!("a {}", strip_backticks(&found_ty)) {
|
||||
format!("a value with type {}", found_ty)
|
||||
format!("a value with type {found_ty}")
|
||||
} else {
|
||||
format!("{found_human} (with type {})", found_ty)
|
||||
format!("{found_human} (with type {found_ty})")
|
||||
};
|
||||
|
||||
let mut result = format!("{expected_str}, but found {found_str}.");
|
||||
@ -626,7 +626,7 @@ fn type_check_params_kw(
|
||||
format!(
|
||||
"`{label}` is not an argument of {}",
|
||||
fn_name
|
||||
.map(|n| format!("`{}`", n))
|
||||
.map(|n| format!("`{n}`"))
|
||||
.unwrap_or_else(|| "this function".to_owned()),
|
||||
),
|
||||
));
|
||||
@ -676,7 +676,7 @@ fn type_check_params_kw(
|
||||
format!(
|
||||
"The input argument of {} requires {}",
|
||||
fn_name
|
||||
.map(|n| format!("`{}`", n))
|
||||
.map(|n| format!("`{n}`"))
|
||||
.unwrap_or_else(|| "this function".to_owned()),
|
||||
type_err_str(ty, &arg.1.value, &arg.1.source_range, exec_state),
|
||||
),
|
||||
@ -691,7 +691,7 @@ fn type_check_params_kw(
|
||||
format!(
|
||||
"{} expects an unlabeled first argument (`@{name}`), but it is labelled in the call",
|
||||
fn_name
|
||||
.map(|n| format!("The function `{}`", n))
|
||||
.map(|n| format!("The function `{n}`"))
|
||||
.unwrap_or_else(|| "This function".to_owned()),
|
||||
),
|
||||
));
|
||||
@ -721,7 +721,7 @@ fn assign_args_to_params_kw(
|
||||
)?;
|
||||
}
|
||||
None => match default {
|
||||
Some(ref default_val) => {
|
||||
Some(default_val) => {
|
||||
let value = KclValue::from_default_param(default_val.clone(), exec_state);
|
||||
exec_state
|
||||
.mut_stack()
|
||||
@ -729,10 +729,7 @@ fn assign_args_to_params_kw(
|
||||
}
|
||||
None => {
|
||||
return Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!(
|
||||
"This function requires a parameter {}, but you haven't passed it one.",
|
||||
name
|
||||
),
|
||||
format!("This function requires a parameter {name}, but you haven't passed it one."),
|
||||
source_ranges,
|
||||
)));
|
||||
}
|
||||
@ -746,7 +743,9 @@ fn assign_args_to_params_kw(
|
||||
let Some(unlabeled) = unlabelled else {
|
||||
return Err(if args.kw_args.labeled.contains_key(param_name) {
|
||||
KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("The function does declare a parameter named '{param_name}', but this parameter doesn't use a label. Try removing the `{param_name}:`"),
|
||||
format!(
|
||||
"The function does declare a parameter named '{param_name}', but this parameter doesn't use a label. Try removing the `{param_name}:`"
|
||||
),
|
||||
source_ranges,
|
||||
))
|
||||
} else {
|
||||
@ -799,7 +798,7 @@ mod test {
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
execution::{memory::Stack, parse_execute, types::NumericType, ContextType},
|
||||
execution::{ContextType, memory::Stack, parse_execute, types::NumericType},
|
||||
parsing::ast::types::{DefaultParamVal, Identifier, Parameter},
|
||||
};
|
||||
|
||||
|
@ -3,16 +3,16 @@ use std::ops::{Add, AddAssign, Mul};
|
||||
use anyhow::Result;
|
||||
use indexmap::IndexMap;
|
||||
use kittycad_modeling_cmds as kcmc;
|
||||
use kittycad_modeling_cmds::{each_cmd as mcmd, length_unit::LengthUnit, websocket::ModelingCmdReq, ModelingCmd};
|
||||
use kittycad_modeling_cmds::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit, websocket::ModelingCmdReq};
|
||||
use parse_display::{Display, FromStr};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
engine::{PlaneName, DEFAULT_PLANE_INFO},
|
||||
engine::{DEFAULT_PLANE_INFO, PlaneName},
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
types::NumericType, ArtifactId, ExecState, ExecutorContext, Metadata, TagEngineInfo, TagIdentifier, UnitLen,
|
||||
ArtifactId, ExecState, ExecutorContext, Metadata, TagEngineInfo, TagIdentifier, UnitLen, types::NumericType,
|
||||
},
|
||||
parsing::ast::types::{Node, NodeRef, TagDeclarator, TagNode},
|
||||
std::{args::TyF64, sketch::PlaneData},
|
||||
@ -472,7 +472,7 @@ impl TryFrom<PlaneData> for PlaneInfo {
|
||||
PlaneData::Plane(_) => {
|
||||
// We will never get here since we already checked for PlaneData::Plane.
|
||||
return Err(KclError::new_internal(KclErrorDetails::new(
|
||||
format!("PlaneData {:?} not found", value),
|
||||
format!("PlaneData {value:?} not found"),
|
||||
Default::default(),
|
||||
)));
|
||||
}
|
||||
@ -480,7 +480,7 @@ impl TryFrom<PlaneData> for PlaneInfo {
|
||||
|
||||
let info = DEFAULT_PLANE_INFO.get(&name).ok_or_else(|| {
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Plane {} not found", name),
|
||||
format!("Plane {name} not found"),
|
||||
Default::default(),
|
||||
))
|
||||
})?;
|
||||
@ -815,8 +815,8 @@ impl EdgeCut {
|
||||
|
||||
pub fn set_id(&mut self, id: uuid::Uuid) {
|
||||
match self {
|
||||
EdgeCut::Fillet { id: ref mut i, .. } => *i = id,
|
||||
EdgeCut::Chamfer { id: ref mut i, .. } => *i = id,
|
||||
EdgeCut::Fillet { id: i, .. } => *i = id,
|
||||
EdgeCut::Chamfer { id: i, .. } => *i = id,
|
||||
}
|
||||
}
|
||||
|
||||
@ -829,8 +829,8 @@ impl EdgeCut {
|
||||
|
||||
pub fn set_edge_id(&mut self, id: uuid::Uuid) {
|
||||
match self {
|
||||
EdgeCut::Fillet { edge_id: ref mut i, .. } => *i = id,
|
||||
EdgeCut::Chamfer { edge_id: ref mut i, .. } => *i = id,
|
||||
EdgeCut::Fillet { edge_id: i, .. } => *i = id,
|
||||
EdgeCut::Chamfer { edge_id: i, .. } => *i = id,
|
||||
}
|
||||
}
|
||||
|
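Dropping `ref mut` in the `set_id` and `set_edge_id` arms above leans on Rust's default binding modes: when the scrutinee is a `&mut` reference, field bindings in the pattern become mutable references automatically. A minimal sketch with a stand-in enum, not the crate's real `EdgeCut` definition:

enum Cut {
    Fillet { id: u32 },
    Chamfer { id: u32 },
}

fn set_id(cut: &mut Cut, new: u32) {
    match cut {
        // `id` binds as `&mut u32` here without spelling out `ref mut`.
        Cut::Fillet { id } | Cut::Chamfer { id } => *id = new,
    }
}

fn main() {
    let mut c = Cut::Fillet { id: 0 };
    set_id(&mut c, 7);
    assert!(matches!(c, Cut::Fillet { id: 7 }));
}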
||||
|
@ -2,12 +2,12 @@ use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
use kcmc::{
|
||||
coord::{System, KITTYCAD},
|
||||
ImportFile, ModelingCmd,
|
||||
coord::{KITTYCAD, System},
|
||||
each_cmd as mcmd,
|
||||
format::InputFormat3d,
|
||||
shared::FileImportFormat,
|
||||
units::UnitLength,
|
||||
ImportFile, ModelingCmd,
|
||||
};
|
||||
use kittycad_modeling_cmds as kcmc;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -16,8 +16,8 @@ use uuid::Uuid;
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
annotations, typed_path::TypedPath, types::UnitLen, ExecState, ExecutorContext, ImportedGeometry,
|
||||
ModelingCmdMeta,
|
||||
ExecState, ExecutorContext, ImportedGeometry, ModelingCmdMeta, annotations, typed_path::TypedPath,
|
||||
types::UnitLen,
|
||||
},
|
||||
fs::FileSystem,
|
||||
parsing::ast::types::{Annotation, Node},
|
||||
@ -184,7 +184,7 @@ pub(super) fn format_from_annotations(
|
||||
annotations::IMPORT_LENGTH_UNIT
|
||||
),
|
||||
vec![p.as_source_range()],
|
||||
)))
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -225,7 +225,7 @@ fn set_coords(fmt: &mut InputFormat3d, coords_str: &str, source_range: SourceRan
|
||||
annotations::IMPORT_COORDS
|
||||
),
|
||||
vec![source_range],
|
||||
)))
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -246,7 +246,7 @@ fn set_length_unit(fmt: &mut InputFormat3d, units_str: &str, source_range: Sourc
|
||||
annotations::IMPORT_LENGTH_UNIT
|
||||
),
|
||||
vec![source_range],
|
||||
)))
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -291,7 +291,9 @@ fn get_import_format_from_extension(ext: &str) -> Result<InputFormat3d> {
|
||||
} else if ext == "glb" {
|
||||
FileImportFormat::Gltf
|
||||
} else {
|
||||
anyhow::bail!("unknown source format for file extension: {ext}. Try setting the `--src-format` flag explicitly or use a valid format.")
|
||||
anyhow::bail!(
|
||||
"unknown source format for file extension: {ext}. Try setting the `--src-format` flag explicitly or use a valid format."
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -6,12 +6,12 @@ use std::{
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{
|
||||
ExecState, ExecutorContext, KclError, ModuleId, SourceRange,
|
||||
errors::KclErrorDetails,
|
||||
execution::typed_path::TypedPath,
|
||||
modules::{ModulePath, ModuleRepr},
|
||||
parsing::ast::types::{ImportPath, ImportStatement, Node as AstNode},
|
||||
walk::{Node, Visitable},
|
||||
ExecState, ExecutorContext, KclError, ModuleId, SourceRange,
|
||||
};
|
||||
|
||||
/// Specific dependency between two modules. The 0th element of this info
|
||||
@ -147,7 +147,7 @@ fn import_dependencies(
|
||||
ret.lock()
|
||||
.map_err(|err| {
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Failed to lock mutex: {}", err),
|
||||
format!("Failed to lock mutex: {err}"),
|
||||
Default::default(),
|
||||
))
|
||||
})?
|
||||
@ -157,7 +157,7 @@ fn import_dependencies(
|
||||
ret.lock()
|
||||
.map_err(|err| {
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Failed to lock mutex: {}", err),
|
||||
format!("Failed to lock mutex: {err}"),
|
||||
Default::default(),
|
||||
))
|
||||
})?
|
||||
@ -179,7 +179,7 @@ fn import_dependencies(
|
||||
|
||||
let ret = ret.lock().map_err(|err| {
|
||||
KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Failed to lock mutex: {}", err),
|
||||
format!("Failed to lock mutex: {err}"),
|
||||
Default::default(),
|
||||
))
|
||||
})?;
|
||||
@ -224,7 +224,7 @@ pub(crate) async fn import_universe(
|
||||
let repr = {
|
||||
let Some(module_info) = exec_state.get_module(module_id) else {
|
||||
return Err(KclError::new_internal(KclErrorDetails::new(
|
||||
format!("Module {} not found", module_id),
|
||||
format!("Module {module_id} not found"),
|
||||
vec![import_stmt.into()],
|
||||
)));
|
||||
};
|
||||
@ -244,9 +244,7 @@ mod tests {
|
||||
use crate::parsing::ast::types::{ImportSelector, Program};
|
||||
|
||||
macro_rules! kcl {
|
||||
( $kcl:expr ) => {{
|
||||
$crate::parsing::top_level_parse($kcl).unwrap()
|
||||
}};
|
||||
( $kcl:expr_2021 ) => {{ $crate::parsing::top_level_parse($kcl).unwrap() }};
|
||||
}
|
||||
|
||||
fn into_module_info(program: AstNode<Program>) -> DependencyInfo {
|
||||
|
@ -5,18 +5,18 @@ use schemars::JsonSchema;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::{
|
||||
CompilationError, KclError, ModuleId, SourceRange,
|
||||
errors::KclErrorDetails,
|
||||
execution::{
|
||||
annotations::{SETTINGS, SETTINGS_UNIT_LENGTH},
|
||||
types::{NumericType, PrimitiveType, RuntimeType, UnitLen},
|
||||
EnvironmentRef, ExecState, Face, Geometry, GeometryWithImportedGeometry, Helix, ImportedGeometry, MetaSettings,
|
||||
Metadata, Plane, Sketch, Solid, TagIdentifier,
|
||||
annotations::{SETTINGS, SETTINGS_UNIT_LENGTH},
|
||||
types::{NumericType, PrimitiveType, RuntimeType, UnitLen},
|
||||
},
|
||||
parsing::ast::types::{
|
||||
DefaultParamVal, FunctionExpression, KclNone, Literal, LiteralValue, Node, TagDeclarator, TagNode,
|
||||
},
|
||||
std::{args::TyF64, StdFnProps},
|
||||
CompilationError, KclError, ModuleId, SourceRange,
|
||||
std::{StdFnProps, args::TyF64},
|
||||
};
|
||||
|
||||
pub type KclObjectFields = HashMap<String, KclValue>;
|
||||
@ -136,9 +136,9 @@ impl JsonSchema for FunctionSource {
|
||||
"FunctionSource".to_owned()
|
||||
}
|
||||
|
||||
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
// TODO: Actually generate a reasonable schema.
|
||||
gen.subschema_for::<()>()
|
||||
r#gen.subschema_for::<()>()
|
||||
}
|
||||
}
|
||||
|
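The `gen` to `r#gen` renames in these `json_schema` signatures are raw identifiers: `gen` is reserved as a keyword in the Rust 2024 edition, so code that keeps the name must spell it `r#gen` (the same applies to the `schemars::r#gen` module path). A tiny sketch of the syntax; the function and constant are invented for illustration:

// `r#` lets a reserved word keep serving as an ordinary identifier.
fn r#gen(seed: u64) -> u64 {
    seed.wrapping_mul(6364136223846793005).wrapping_add(1)
}

fn main() {
    let x = r#gen(42);
    println!("{x}");
}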
||||
@ -587,7 +587,7 @@ impl KclValue {
|
||||
match self {
|
||||
KclValue::TagIdentifier(t) => Ok(*t.clone()),
|
||||
_ => Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Not a tag identifier: {:?}", self),
|
||||
format!("Not a tag identifier: {self:?}"),
|
||||
self.clone().into(),
|
||||
))),
|
||||
}
|
||||
@ -598,7 +598,7 @@ impl KclValue {
|
||||
match self {
|
||||
KclValue::TagDeclarator(t) => Ok((**t).clone()),
|
||||
_ => Err(KclError::new_semantic(KclErrorDetails::new(
|
||||
format!("Not a tag declarator: {:?}", self),
|
||||
format!("Not a tag declarator: {self:?}"),
|
||||
self.clone().into(),
|
||||
))),
|
||||
}
|
||||
|
@ -207,8 +207,8 @@ use std::{
|
||||
fmt,
|
||||
pin::Pin,
|
||||
sync::{
|
||||
atomic::{AtomicBool, AtomicUsize, Ordering},
|
||||
Arc,
|
||||
atomic::{AtomicBool, AtomicUsize, Ordering},
|
||||
},
|
||||
};
|
||||
|
||||
@ -489,7 +489,7 @@ impl ProgramMemory {
|
||||
}
|
||||
|
||||
Err(KclError::new_undefined_value(
|
||||
KclErrorDetails::new(format!("`{}` is not defined", var), vec![]),
|
||||
KclErrorDetails::new(format!("`{var}` is not defined"), vec![]),
|
||||
Some(var.to_owned()),
|
||||
))
|
||||
}
|
||||
@ -541,22 +541,6 @@ impl Stack {
|
||||
self.push_new_env_for_call(snapshot);
|
||||
}
|
||||
|
||||
/// Push a new stack frame on to the call stack for callees which should not read or write
|
||||
/// from memory.
|
||||
///
|
||||
/// This is suitable for calling standard library functions or other functions written in Rust
|
||||
/// which will use 'Rust memory' rather than KCL's memory and cannot reach into the wider
|
||||
/// environment.
|
||||
///
|
||||
/// Trying to read or write from this environment will panic with an index out of bounds.
|
||||
pub fn push_new_env_for_rust_call(&mut self) {
|
||||
self.call_stack.push(self.current_env);
|
||||
// Rust functions shouldn't try to set or access anything in their environment, so don't
|
||||
// waste time and space on a new env. Using usize::MAX means we'll get an overflow if we
|
||||
// try to access anything rather than a silent error.
|
||||
self.current_env = EnvironmentRef(usize::MAX, 0);
|
||||
}
|
||||
|
||||
/// Push a new stack frame on to the call stack with no connection to a parent environment.
|
||||
///
|
||||
/// Suitable for executing a separate module.
|
||||
@ -647,7 +631,7 @@ impl Stack {
|
||||
let env = self.memory.get_env(self.current_env.index());
|
||||
if env.contains_key(&key) {
|
||||
return Err(KclError::new_value_already_defined(KclErrorDetails::new(
|
||||
format!("Cannot redefine `{}`", key),
|
||||
format!("Cannot redefine `{key}`"),
|
||||
vec![source_range],
|
||||
)));
|
||||
}
|
||||
@ -683,7 +667,7 @@ impl Stack {
|
||||
env.contains_key(var)
|
||||
}
|
||||
|
||||
/// Get a key from the first KCL (i.e., non-Rust) stack frame on the call stack.
|
||||
/// Get a key from the first stack frame on the call stack.
|
||||
pub fn get_from_call_stack(&self, key: &str, source_range: SourceRange) -> Result<(usize, &KclValue), KclError> {
|
||||
if !self.current_env.skip_env() {
|
||||
return Ok((self.current_env.1, self.get(key, source_range)?));
|
||||
@ -695,7 +679,7 @@ impl Stack {
|
||||
}
|
||||
}
|
||||
|
||||
unreachable!("It can't be Rust frames all the way down");
|
||||
unreachable!("No frames on the stack?");
|
||||
}
|
||||
|
||||
/// Iterate over all keys in the current environment which satisfy the provided predicate.
|
||||
@ -1047,7 +1031,7 @@ mod env {
|
||||
}
|
||||
|
||||
/// Take all bindings from the environment.
|
||||
pub(super) fn take_bindings(self: Pin<&mut Self>) -> impl Iterator<Item = (String, (usize, KclValue))> {
|
||||
pub(super) fn take_bindings(self: Pin<&mut Self>) -> impl Iterator<Item = (String, (usize, KclValue))> + use<> {
|
||||
// SAFETY: caller must have unique access since self is mut. We're not moving or invalidating `self`.
|
||||
let bindings = std::mem::take(unsafe { self.bindings.get().as_mut().unwrap() });
|
||||
bindings.into_iter()
|
||||
@ -1217,24 +1201,6 @@ mod test {
|
||||
assert_get_from(mem, "c", 5, callee);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rust_env() {
|
||||
let mem = &mut Stack::new_for_tests();
|
||||
mem.add("a".to_owned(), val(1), sr()).unwrap();
|
||||
mem.add("b".to_owned(), val(3), sr()).unwrap();
|
||||
let sn = mem.snapshot();
|
||||
|
||||
mem.push_new_env_for_rust_call();
|
||||
mem.push_new_env_for_call(sn);
|
||||
assert_get(mem, "b", 3);
|
||||
mem.add("b".to_owned(), val(4), sr()).unwrap();
|
||||
assert_get(mem, "b", 4);
|
||||
|
||||
mem.pop_env();
|
||||
mem.pop_env();
|
||||
assert_get(mem, "b", 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deep_call_env() {
|
||||
let mem = &mut Stack::new_for_tests();
|
||||
|
@ -16,10 +16,9 @@ pub(crate) use import::PreImportedGeometry;
|
||||
use indexmap::IndexMap;
|
||||
pub use kcl_value::{KclObjectFields, KclValue};
|
||||
use kcmc::{
|
||||
each_cmd as mcmd,
|
||||
ok_response::{output::TakeSnapshot, OkModelingCmdResponse},
|
||||
ImageFormat, ModelingCmd, each_cmd as mcmd,
|
||||
ok_response::{OkModelingCmdResponse, output::TakeSnapshot},
|
||||
websocket::{ModelingSessionData, OkWebSocketResponseData},
|
||||
ImageFormat, ModelingCmd,
|
||||
};
|
||||
use kittycad_modeling_cmds::{self as kcmc, id::ModelingCmdId};
|
||||
pub use memory::EnvironmentRef;
|
||||
@ -31,6 +30,7 @@ pub use state::{ExecState, MetaSettings};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{
|
||||
CompilationError, ExecError, KclErrorWithOutputs,
|
||||
engine::{EngineManager, GridScaleBehavior},
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::{
|
||||
@ -43,7 +43,6 @@ use crate::{
|
||||
modules::{ModuleId, ModulePath, ModuleRepr},
|
||||
parsing::ast::types::{Expr, ImportPath, NodeRef},
|
||||
source_range::SourceRange,
|
||||
CompilationError, ExecError, KclErrorWithOutputs,
|
||||
};
|
||||
|
||||
pub(crate) mod annotations;
|
||||
@ -1329,7 +1328,7 @@ impl ExecutorContext {
|
||||
created: if deterministic_time {
|
||||
Some("2021-01-01T00:00:00Z".parse().map_err(|e| {
|
||||
KclError::new_internal(crate::errors::KclErrorDetails::new(
|
||||
format!("Failed to parse date: {}", e),
|
||||
format!("Failed to parse date: {e}"),
|
||||
vec![SourceRange::default()],
|
||||
))
|
||||
})?)
|
||||
@ -1409,7 +1408,7 @@ pub(crate) async fn parse_execute_with_project_dir(
|
||||
engine: Arc::new(Box::new(
|
||||
crate::engine::conn_mock::EngineConnection::new().await.map_err(|err| {
|
||||
KclError::new_internal(crate::errors::KclErrorDetails::new(
|
||||
format!("Failed to create mock engine connection: {}", err),
|
||||
format!("Failed to create mock engine connection: {err}"),
|
||||
vec![SourceRange::default()],
|
||||
))
|
||||
})?,
|
||||
@ -1446,7 +1445,7 @@ mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
use crate::{errors::KclErrorDetails, execution::memory::Stack, ModuleId};
|
||||
use crate::{ModuleId, errors::KclErrorDetails, execution::memory::Stack};
|
||||
|
||||
/// Convenience function to get a JSON value from memory and unwrap.
|
||||
#[track_caller]
|
||||
@ -1921,6 +1920,22 @@ shape = layer() |> patternTransform(instances = 10, transform = transform)
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn pass_std_to_std() {
|
||||
let ast = r#"sketch001 = startSketchOn(XY)
|
||||
profile001 = circle(sketch001, center = [0, 0], radius = 2)
|
||||
extrude001 = extrude(profile001, length = 5)
|
||||
extrudes = patternLinear3d(
|
||||
extrude001,
|
||||
instances = 3,
|
||||
distance = 5,
|
||||
axis = [1, 1, 0],
|
||||
)
|
||||
clone001 = map(extrudes, f = clone)
|
||||
"#;
|
||||
parse_execute(ast).await.unwrap();
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread")]
|
||||
async fn test_zero_param_fn() {
|
||||
let ast = r#"sigmaAllow = 35000 // psi
|
||||
@ -2045,8 +2060,7 @@ notFunction = !x";
|
||||
fn_err
|
||||
.message()
|
||||
.starts_with("Cannot apply unary operator ! to non-boolean value: "),
|
||||
"Actual error: {:?}",
|
||||
fn_err
|
||||
"Actual error: {fn_err:?}"
|
||||
);
|
||||
|
||||
let code8 = "
|
||||
@ -2059,8 +2073,7 @@ notTagDeclarator = !myTagDeclarator";
|
||||
tag_declarator_err
|
||||
.message()
|
||||
.starts_with("Cannot apply unary operator ! to non-boolean value: a tag declarator"),
|
||||
"Actual error: {:?}",
|
||||
tag_declarator_err
|
||||
"Actual error: {tag_declarator_err:?}"
|
||||
);
|
||||
|
||||
let code9 = "
|
||||
@ -2073,8 +2086,7 @@ notTagIdentifier = !myTag";
|
||||
tag_identifier_err
|
||||
.message()
|
||||
.starts_with("Cannot apply unary operator ! to non-boolean value: a tag identifier"),
|
||||
"Actual error: {:?}",
|
||||
tag_identifier_err
|
||||
"Actual error: {tag_identifier_err:?}"
|
||||
);
|
||||
|
||||
let code10 = "notPipe = !(1 |> 2)";
|
||||
@ -2226,7 +2238,7 @@ w = f() + f()
|
||||
if let Err(err) = ctx.run_with_caching(old_program).await {
|
||||
let report = err.into_miette_report_with_outputs(code).unwrap();
|
||||
let report = miette::Report::new(report);
|
||||
panic!("Error executing program: {:?}", report);
|
||||
panic!("Error executing program: {report:?}");
|
||||
}
|
||||
|
||||
// Get the id_generator from the first execution.
|
||||
|
@ -8,10 +8,10 @@ use uuid::Uuid;
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
use crate::exec::ArtifactCommand;
|
||||
use crate::{
|
||||
ExecState, ExecutorContext, KclError, SourceRange,
|
||||
exec::{IdGenerator, KclValue},
|
||||
execution::Solid,
|
||||
std::Args,
|
||||
ExecState, ExecutorContext, KclError, SourceRange,
|
||||
};
|
||||
|
||||
/// Context and metadata needed to send a single modeling command.
|
||||
|
@ -9,20 +9,19 @@ use uuid::Uuid;
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
use crate::execution::{Artifact, ArtifactCommand, ArtifactGraph, ArtifactId};
|
||||
use crate::{
|
||||
CompilationError, EngineManager, ExecutorContext, KclErrorWithOutputs,
|
||||
errors::{KclError, KclErrorDetails, Severity},
|
||||
exec::DefaultPlanes,
|
||||
execution::{
|
||||
annotations,
|
||||
EnvironmentRef, ExecOutcome, ExecutorSettings, KclValue, UnitAngle, UnitLen, annotations,
|
||||
cad_op::Operation,
|
||||
id_generator::IdGenerator,
|
||||
memory::{ProgramMemory, Stack},
|
||||
types::{self, NumericType},
|
||||
EnvironmentRef, ExecOutcome, ExecutorSettings, KclValue, UnitAngle, UnitLen,
|
||||
},
|
||||
modules::{ModuleId, ModuleInfo, ModuleLoader, ModulePath, ModuleRepr, ModuleSource},
|
||||
parsing::ast::types::{Annotation, NodeRef},
|
||||
source_range::SourceRange,
|
||||
CompilationError, EngineManager, ExecutorContext, KclErrorWithOutputs,
|
||||
};
|
||||
|
||||
/// State for executing a program.
|
||||
@ -555,7 +554,7 @@ impl MetaSettings {
|
||||
annotations::SETTINGS_UNIT_ANGLE
|
||||
),
|
||||
vec![annotation.as_source_range()],
|
||||
)))
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -220,9 +220,9 @@ impl schemars::JsonSchema for TypedPath {
|
||||
"TypedPath".to_owned()
|
||||
}
|
||||
|
||||
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
// TODO: Actually generate a reasonable schema.
|
||||
gen.subschema_for::<std::path::PathBuf>()
|
||||
r#gen.subschema_for::<std::path::PathBuf>()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5,17 +5,17 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
CompilationError, SourceRange,
|
||||
execution::{
|
||||
ExecState, Plane, PlaneInfo, Point3d,
|
||||
kcl_value::{KclValue, TypeDef},
|
||||
memory::{self},
|
||||
ExecState, Plane, PlaneInfo, Point3d,
|
||||
},
|
||||
parsing::{
|
||||
ast::types::{PrimitiveType as AstPrimitiveType, Type},
|
||||
token::NumericSuffix,
|
||||
},
|
||||
std::args::{FromKclValue, TyF64},
|
||||
CompilationError, SourceRange,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
@ -210,7 +210,7 @@ impl RuntimeType {
|
||||
let ty_val = exec_state
|
||||
.stack()
|
||||
.get(&format!("{}{}", memory::TYPE_PREFIX, alias), source_range)
|
||||
.map_err(|_| CompilationError::err(source_range, format!("Unknown type: {}", alias)))?;
|
||||
.map_err(|_| CompilationError::err(source_range, format!("Unknown type: {alias}")))?;
|
||||
|
||||
Ok(match ty_val {
|
||||
KclValue::Type { value, .. } => match value {
|
||||
@ -241,7 +241,7 @@ impl RuntimeType {
|
||||
"a tuple with values of types ({})",
|
||||
tys.iter().map(Self::human_friendly_type).collect::<Vec<_>>().join(", ")
|
||||
),
|
||||
RuntimeType::Object(_) => format!("an object with fields {}", self),
|
||||
RuntimeType::Object(_) => format!("an object with fields {self}"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -840,6 +840,18 @@ pub enum UnitType {
|
||||
Angle(UnitAngle),
|
||||
}
|
||||
|
||||
impl UnitType {
|
||||
pub(crate) fn to_suffix(self) -> Option<String> {
|
||||
match self {
|
||||
UnitType::Count => Some("_".to_owned()),
|
||||
UnitType::Length(UnitLen::Unknown) => None,
|
||||
UnitType::Angle(UnitAngle::Unknown) => None,
|
||||
UnitType::Length(l) => Some(l.to_string()),
|
||||
UnitType::Angle(a) => Some(a.to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
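The new `to_suffix` helper gives the literal suffix for a unit ("_" for a bare count, "mm", "deg", and so on) and returns `None` for unknown units, which is how `format_number_value` further down refuses to print a literal it could not re-parse. A reduced sketch of that contract, using stand-in types rather than the crate's real ones:

#[derive(Clone, Copy, PartialEq)]
enum Unit { Count, Mm, Unknown }

fn to_suffix(u: Unit) -> Option<String> {
    match u {
        Unit::Count => Some("_".to_owned()),
        Unit::Mm => Some("mm".to_owned()),
        Unit::Unknown => None, // no syntax exists, so no suffix
    }
}

fn main() {
    assert_eq!(to_suffix(Unit::Mm).as_deref(), Some("mm"));
    assert_eq!(to_suffix(Unit::Unknown), None);
}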
||||
impl std::fmt::Display for UnitType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
@ -1529,7 +1541,7 @@ impl KclValue {
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::execution::{parse_execute, ExecTestResults};
|
||||
use crate::execution::{ExecTestResults, parse_execute};
|
||||
|
||||
fn values(exec_state: &mut ExecState) -> Vec<KclValue> {
|
||||
vec![
|
||||
@ -1975,14 +1987,16 @@ mod test {
|
||||
])
|
||||
)
|
||||
);
|
||||
assert!(RuntimeType::Union(vec![
|
||||
assert!(
|
||||
RuntimeType::Union(vec![
|
||||
RuntimeType::Primitive(PrimitiveType::Number(NumericType::Any)),
|
||||
RuntimeType::Primitive(PrimitiveType::Boolean)
|
||||
])
|
||||
.subtype(&RuntimeType::Union(vec![
|
||||
RuntimeType::Primitive(PrimitiveType::Number(NumericType::Any)),
|
||||
RuntimeType::Primitive(PrimitiveType::Boolean)
|
||||
])));
|
||||
]))
|
||||
);
|
||||
|
||||
// Covariance
|
||||
let count = KclValue::Number {
|
||||
|
@ -45,6 +45,31 @@ pub fn format_number_literal(value: f64, suffix: NumericSuffix) -> Result<String
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, thiserror::Error)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum FormatNumericTypeError {
|
||||
#[error("Invalid numeric type: {0:?}")]
|
||||
Invalid(NumericType),
|
||||
}
|
||||
|
||||
/// For UI code generation, format a number value with a suffix such that the
|
||||
/// result can parse as a literal. If it can't be done, returns an error.
|
||||
///
|
||||
/// This is used by TS.
|
||||
pub fn format_number_value(value: f64, ty: NumericType) -> Result<String, FormatNumericTypeError> {
|
||||
match ty {
|
||||
NumericType::Default { .. } => Ok(value.to_string()),
|
||||
// There isn't a syntactic suffix for these. For unknown, we don't want
|
||||
// to ever generate the unknown suffix. We currently warn on it, and we
|
||||
// may remove it in the future.
|
||||
NumericType::Unknown | NumericType::Any => Err(FormatNumericTypeError::Invalid(ty)),
|
||||
NumericType::Known(unit_type) => unit_type
|
||||
.to_suffix()
|
||||
.map(|suffix| format!("{value}{suffix}"))
|
||||
.ok_or(FormatNumericTypeError::Invalid(ty)),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
@ -134,4 +159,74 @@ mod tests {
|
||||
Err(FormatNumericSuffixError::Invalid(NumericSuffix::Unknown))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_number_value() {
|
||||
assert_eq!(
|
||||
format_number_value(
|
||||
1.0,
|
||||
NumericType::Default {
|
||||
len: Default::default(),
|
||||
angle: Default::default()
|
||||
}
|
||||
),
|
||||
Ok("1".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Unknown))),
|
||||
Err(FormatNumericTypeError::Invalid(NumericType::Known(UnitType::Length(
|
||||
UnitLen::Unknown
|
||||
))))
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Angle(UnitAngle::Unknown))),
|
||||
Err(FormatNumericTypeError::Invalid(NumericType::Known(UnitType::Angle(
|
||||
UnitAngle::Unknown
|
||||
))))
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Count)),
|
||||
Ok("1_".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Mm))),
|
||||
Ok("1mm".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Cm))),
|
||||
Ok("1cm".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::M))),
|
||||
Ok("1m".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Inches))),
|
||||
Ok("1in".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Feet))),
|
||||
Ok("1ft".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Length(UnitLen::Yards))),
|
||||
Ok("1yd".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Angle(UnitAngle::Degrees))),
|
||||
Ok("1deg".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Known(UnitType::Angle(UnitAngle::Radians))),
|
||||
Ok("1rad".to_owned())
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Unknown),
|
||||
Err(FormatNumericTypeError::Invalid(NumericType::Unknown))
|
||||
);
|
||||
assert_eq!(
|
||||
format_number_value(1.0, NumericType::Any),
|
||||
Err(FormatNumericTypeError::Invalid(NumericType::Any))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -3,10 +3,10 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::typed_path::TypedPath,
|
||||
fs::FileSystem,
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{execution::typed_path::TypedPath, SourceRange};
|
||||
use crate::{SourceRange, execution::typed_path::TypedPath};
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub mod local;
|
||||
|
@ -4,11 +4,11 @@ use anyhow::Result;
|
||||
use wasm_bindgen::prelude::wasm_bindgen;
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
errors::{KclError, KclErrorDetails},
|
||||
execution::typed_path::TypedPath,
|
||||
fs::FileSystem,
|
||||
wasm::JsFuture,
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
#[wasm_bindgen(module = "/../../src/lang/std/fileSystemManager.ts")]
|
||||
|
@ -90,10 +90,9 @@ pub use errors::{
|
||||
ReportWithOutputs,
|
||||
};
|
||||
pub use execution::{
|
||||
bust_cache, clear_mem_cache,
|
||||
ExecOutcome, ExecState, ExecutorContext, ExecutorSettings, MetaSettings, Point2d, bust_cache, clear_mem_cache,
|
||||
typed_path::TypedPath,
|
||||
types::{UnitAngle, UnitLen},
|
||||
ExecOutcome, ExecState, ExecutorContext, ExecutorSettings, MetaSettings, Point2d,
|
||||
};
|
||||
pub use lsp::{
|
||||
copilot::Backend as CopilotLspBackend,
|
||||
@ -101,7 +100,7 @@ pub use lsp::{
|
||||
};
|
||||
pub use modules::ModuleId;
|
||||
pub use parsing::ast::types::{FormatOptions, NodePath, Step as NodePathStep};
|
||||
pub use settings::types::{project::ProjectConfiguration, Configuration, UnitLength};
|
||||
pub use settings::types::{Configuration, UnitLength, project::ProjectConfiguration};
|
||||
pub use source_range::SourceRange;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub use unparser::{recast_dir, walk_dir};
|
||||
@ -109,12 +108,12 @@ pub use unparser::{recast_dir, walk_dir};
|
||||
// Rather than make executor public and make lots of it pub(crate), just re-export into a new module.
|
||||
// Ideally we wouldn't export these things at all, they should only be used for testing.
|
||||
pub mod exec {
|
||||
pub use crate::execution::{
|
||||
types::{NumericType, UnitAngle, UnitLen, UnitType},
|
||||
DefaultPlanes, IdGenerator, KclValue, PlaneType, Sketch,
|
||||
};
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
pub use crate::execution::{ArtifactCommand, Operation};
|
||||
pub use crate::execution::{
|
||||
DefaultPlanes, IdGenerator, KclValue, PlaneType, Sketch,
|
||||
types::{NumericType, UnitAngle, UnitLen, UnitType},
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
@ -136,12 +135,12 @@ pub mod native_engine {
|
||||
}
|
||||
|
||||
pub mod std_utils {
|
||||
pub use crate::std::utils::{get_tangential_arc_to_info, is_points_ccw_wasm, TangentialArcInfoInput};
|
||||
pub use crate::std::utils::{TangentialArcInfoInput, get_tangential_arc_to_info, is_points_ccw_wasm};
|
||||
}
|
||||
|
||||
pub mod pretty {
|
||||
pub use crate::{
|
||||
fmt::{format_number_literal, human_display_number},
|
||||
fmt::{format_number_literal, format_number_value, human_display_number},
|
||||
parsing::token::NumericSuffix,
|
||||
};
|
||||
}
|
||||
@ -160,7 +159,7 @@ lazy_static::lazy_static! {
|
||||
#[cfg(feature = "cli")]
|
||||
let named_extensions = kittycad::types::FileImportFormat::value_variants()
|
||||
.iter()
|
||||
.map(|x| format!("{}", x))
|
||||
.map(|x| format!("{x}"))
|
||||
.collect::<Vec<String>>();
|
||||
#[cfg(not(feature = "cli"))]
|
||||
let named_extensions = vec![]; // We don't really need this outside of the CLI.
|
||||
@ -276,41 +275,25 @@ impl Program {
|
||||
#[inline]
|
||||
fn try_f64_to_usize(f: f64) -> Option<usize> {
|
||||
let i = f as usize;
|
||||
if i as f64 == f {
|
||||
Some(i)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if i as f64 == f { Some(i) } else { None }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn try_f64_to_u32(f: f64) -> Option<u32> {
|
||||
let i = f as u32;
|
||||
if i as f64 == f {
|
||||
Some(i)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if i as f64 == f { Some(i) } else { None }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn try_f64_to_u64(f: f64) -> Option<u64> {
|
||||
let i = f as u64;
|
||||
if i as f64 == f {
|
||||
Some(i)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if i as f64 == f { Some(i) } else { None }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn try_f64_to_i64(f: f64) -> Option<i64> {
|
||||
let i = f as i64;
|
||||
if i as f64 == f {
|
||||
Some(i)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if i as f64 == f { Some(i) } else { None }
|
||||
}
|
||||
|
||||
/// Get the version of the KCL library.
|
||||
|
@ -2,11 +2,11 @@ use anyhow::Result;
|
||||
use convert_case::Casing;
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
errors::Suggestion,
|
||||
lint::rule::{def_finding, Discovered, Finding},
|
||||
lint::rule::{Discovered, Finding, def_finding},
|
||||
parsing::ast::types::{Node as AstNode, ObjectProperty, Program, VariableDeclarator},
|
||||
walk::Node,
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
def_finding!(
|
||||
@ -38,12 +38,12 @@ fn lint_lower_camel_case_var(decl: &VariableDeclarator, prog: &AstNode<Program>)
|
||||
let recast = prog.recast(&Default::default(), 0);
|
||||
|
||||
let suggestion = Suggestion {
|
||||
title: format!("rename '{}' to '{}'", name, new_name),
|
||||
title: format!("rename '{name}' to '{new_name}'"),
|
||||
insert: recast,
|
||||
source_range: prog.as_source_range(),
|
||||
};
|
||||
findings.push(Z0001.at(
|
||||
format!("found '{}'", name),
|
||||
format!("found '{name}'"),
|
||||
SourceRange::new(ident.start, ident.end, ident.module_id),
|
||||
Some(suggestion.clone()),
|
||||
));
|
||||
@ -61,7 +61,7 @@ fn lint_lower_camel_case_property(decl: &ObjectProperty, _prog: &AstNode<Program
|
||||
if !name.is_case(convert_case::Case::Camel) {
|
||||
// We can't rename the properties yet.
|
||||
findings.push(Z0001.at(
|
||||
format!("found '{}'", name),
|
||||
format!("found '{name}'"),
|
||||
SourceRange::new(ident.start, ident.end, ident.module_id),
|
||||
None,
|
||||
));
|
||||
@ -93,7 +93,7 @@ pub fn lint_object_properties(decl: Node, prog: &AstNode<Program>) -> Result<Vec
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{lint_object_properties, lint_variables, Z0001};
|
||||
use super::{Z0001, lint_object_properties, lint_variables};
|
||||
use crate::lint::rule::{assert_finding, test_finding, test_no_finding};
|
||||
|
||||
#[tokio::test]
|
||||
|
@ -4,7 +4,7 @@ use crate::{
|
||||
errors::Suggestion,
|
||||
lint::{
|
||||
checks::offset_plane::start_sketch_on_check_specific_plane,
|
||||
rule::{def_finding, Discovered, Finding},
|
||||
rule::{Discovered, Finding, def_finding},
|
||||
},
|
||||
parsing::ast::types::{Node as AstNode, Program},
|
||||
walk::Node,
|
||||
@ -33,14 +33,11 @@ pub fn lint_should_be_default_plane(node: Node, _prog: &AstNode<Program>) -> Res
|
||||
}
|
||||
let suggestion = Suggestion {
|
||||
title: "use defaultPlane instead".to_owned(),
|
||||
insert: format!("{}", plane_name),
|
||||
insert: format!("{plane_name}"),
|
||||
source_range: call_source_range,
|
||||
};
|
||||
Ok(vec![Z0002.at(
|
||||
format!(
|
||||
"custom plane in startSketchOn; defaultPlane {} would work here",
|
||||
plane_name
|
||||
),
|
||||
format!("custom plane in startSketchOn; defaultPlane {plane_name} would work here"),
|
||||
call_source_range,
|
||||
Some(suggestion),
|
||||
)])
|
||||
@ -48,7 +45,7 @@ pub fn lint_should_be_default_plane(node: Node, _prog: &AstNode<Program>) -> Res
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{lint_should_be_default_plane, Z0002};
|
||||
use super::{Z0002, lint_should_be_default_plane};
|
||||
use crate::lint::rule::{test_finding, test_no_finding};
|
||||
|
||||
test_finding!(
|
||||
|
@ -2,6 +2,6 @@ mod camel_case;
|
||||
mod default_plane;
|
||||
mod offset_plane;
|
||||
|
||||
pub use camel_case::{lint_object_properties, lint_variables, Z0001};
|
||||
pub use default_plane::{lint_should_be_default_plane, Z0002};
|
||||
pub use offset_plane::{lint_should_be_offset_plane, Z0003};
|
||||
pub use camel_case::{Z0001, lint_object_properties, lint_variables};
|
||||
pub use default_plane::{Z0002, lint_should_be_default_plane};
|
||||
pub use offset_plane::{Z0003, lint_should_be_offset_plane};
|
||||
|
@ -1,15 +1,15 @@
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{
|
||||
engine::{PlaneName, DEFAULT_PLANE_INFO},
|
||||
SourceRange,
|
||||
engine::{DEFAULT_PLANE_INFO, PlaneName},
|
||||
errors::Suggestion,
|
||||
execution::{types::UnitLen, PlaneInfo, Point3d},
|
||||
lint::rule::{def_finding, Discovered, Finding},
|
||||
execution::{PlaneInfo, Point3d, types::UnitLen},
|
||||
lint::rule::{Discovered, Finding, def_finding},
|
||||
parsing::ast::types::{
|
||||
BinaryPart, CallExpressionKw, Expr, LiteralValue, Node as AstNode, ObjectExpression, Program, UnaryOperator,
|
||||
},
|
||||
walk::Node,
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
def_finding!(
|
||||
@ -39,14 +39,11 @@ pub fn lint_should_be_offset_plane(node: Node, _prog: &AstNode<Program>) -> Resu
|
||||
}
|
||||
let suggestion = Suggestion {
|
||||
title: "use offsetPlane instead".to_owned(),
|
||||
insert: format!("offsetPlane({}, offset = {})", plane_name, offset),
|
||||
insert: format!("offsetPlane({plane_name}, offset = {offset})"),
|
||||
source_range: call_source_range,
|
||||
};
|
||||
Ok(vec![Z0003.at(
|
||||
format!(
|
||||
"custom plane in startSketchOn; offsetPlane from {} would work here",
|
||||
plane_name
|
||||
),
|
||||
format!("custom plane in startSketchOn; offsetPlane from {plane_name} would work here"),
|
||||
call_source_range,
|
||||
Some(suggestion),
|
||||
)])
|
||||
@ -68,16 +65,16 @@ fn get_xyz(point: &ObjectExpression) -> Option<(f64, f64, f64)> {
|
||||
|
||||
for property in &point.properties {
|
||||
let Some(value) = (match &property.value {
|
||||
Expr::UnaryExpression(ref value) => {
|
||||
Expr::UnaryExpression(value) => {
|
||||
if value.operator != UnaryOperator::Neg {
|
||||
continue;
|
||||
}
|
||||
let BinaryPart::Literal(ref value) = &value.inner.argument else {
|
||||
let BinaryPart::Literal(value) = &value.inner.argument else {
|
||||
continue;
|
||||
};
|
||||
unlitafy(&value.inner.value).map(|v| -v)
|
||||
}
|
||||
Expr::Literal(ref value) => unlitafy(&value.value),
|
||||
Expr::Literal(value) => unlitafy(&value.value),
|
||||
_ => {
|
||||
continue;
|
||||
}
|
||||
@ -271,7 +268,7 @@ fn normalize_plane_info(plane_info: &PlaneInfo) -> PlaneInfo {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{lint_should_be_offset_plane, Z0003};
|
||||
use super::{Z0003, lint_should_be_offset_plane};
|
||||
use crate::lint::rule::{test_finding, test_no_finding};
|
||||
|
||||
test_finding!(
|
||||
|
@ -4,11 +4,11 @@ use serde::Serialize;
|
||||
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
errors::Suggestion,
|
||||
lsp::IntoDiagnostic,
|
||||
parsing::ast::types::{Node as AstNode, Program},
|
||||
walk::Node,
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
/// Check the provided AST for any found rule violations.
|
||||
@ -180,7 +180,7 @@ impl Finding {
|
||||
}
|
||||
|
||||
macro_rules! def_finding {
|
||||
( $code:ident, $title:expr, $description:expr ) => {
|
||||
( $code:ident, $title:expr_2021, $description:expr_2021 ) => {
|
||||
/// Generated Finding
|
||||
pub const $code: Finding = $crate::lint::rule::finding!($code, $title, $description);
|
||||
};
|
||||
@ -188,7 +188,7 @@ macro_rules! def_finding {
|
||||
pub(crate) use def_finding;
|
||||
|
||||
macro_rules! finding {
|
||||
( $code:ident, $title:expr, $description:expr ) => {
|
||||
( $code:ident, $title:expr_2021, $description:expr_2021 ) => {
|
||||
$crate::lint::rule::Finding {
|
||||
code: stringify!($code),
|
||||
title: $title,
|
||||
@ -205,7 +205,7 @@ pub(crate) use test::{assert_finding, assert_no_finding, test_finding, test_no_f
|
||||
mod test {
|
||||
|
||||
macro_rules! assert_no_finding {
|
||||
( $check:expr, $finding:expr, $kcl:expr ) => {
|
||||
( $check:expr_2021, $finding:expr_2021, $kcl:expr_2021 ) => {
|
||||
let prog = $crate::Program::parse_no_errs($kcl).unwrap();
|
||||
|
||||
// Ensure the code still works.
|
||||
@ -220,7 +220,7 @@ mod test {
|
||||
}
|
||||
|
||||
macro_rules! assert_finding {
|
||||
( $check:expr, $finding:expr, $kcl:expr, $output:expr, $suggestion:expr ) => {
|
||||
( $check:expr_2021, $finding:expr_2021, $kcl:expr_2021, $output:expr_2021, $suggestion:expr_2021 ) => {
|
||||
let prog = $crate::Program::parse_no_errs($kcl).unwrap();
|
||||
|
||||
// Ensure the code still works.
|
||||
@ -250,7 +250,7 @@ mod test {
|
||||
}
|
||||
|
||||
macro_rules! test_finding {
|
||||
( $name:ident, $check:expr, $finding:expr, $kcl:expr, $output:expr, $suggestion:expr ) => {
|
||||
( $name:ident, $check:expr_2021, $finding:expr_2021, $kcl:expr_2021, $output:expr_2021, $suggestion:expr_2021 ) => {
|
||||
#[tokio::test]
|
||||
async fn $name() {
|
||||
$crate::lint::rule::assert_finding!($check, $finding, $kcl, $output, $suggestion);
|
||||
@ -259,7 +259,7 @@ mod test {
|
||||
}
|
||||
|
||||
macro_rules! test_no_finding {
|
||||
( $name:ident, $check:expr, $finding:expr, $kcl:expr ) => {
|
||||
( $name:ident, $check:expr_2021, $finding:expr_2021, $kcl:expr_2021 ) => {
|
||||
#[tokio::test]
|
||||
async fn $name() {
|
||||
$crate::lint::rule::assert_no_finding!($check, $finding, $kcl);
|
||||
|
@ -90,7 +90,7 @@ where
|
||||
|
||||
async fn do_initialized(&self, params: InitializedParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("initialized: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("initialized: {params:?}"))
|
||||
.await;
|
||||
|
||||
self.set_is_initialized(true).await;
|
||||
@ -139,7 +139,7 @@ where
|
||||
self.client()
|
||||
.log_message(
|
||||
MessageType::WARNING,
|
||||
format!("updating from disk `{}` failed: {:?}", project_dir, err),
|
||||
format!("updating from disk `{project_dir}` failed: {err:?}"),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
@ -148,19 +148,19 @@ where
|
||||
|
||||
async fn do_did_change_configuration(&self, params: DidChangeConfigurationParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("configuration changed: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("configuration changed: {params:?}"))
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn do_did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("watched files changed: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("watched files changed: {params:?}"))
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn do_did_create_files(&self, params: CreateFilesParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("files created: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("files created: {params:?}"))
|
||||
.await;
|
||||
// Create each file in the code map.
|
||||
for file in params.files {
|
||||
@ -170,7 +170,7 @@ where
|
||||
|
||||
async fn do_did_rename_files(&self, params: RenameFilesParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("files renamed: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("files renamed: {params:?}"))
|
||||
.await;
|
||||
// Rename each file in the code map.
|
||||
for file in params.files {
|
||||
@ -186,7 +186,7 @@ where
|
||||
|
||||
async fn do_did_delete_files(&self, params: DeleteFilesParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("files deleted: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("files deleted: {params:?}"))
|
||||
.await;
|
||||
// Delete each file in the map.
|
||||
for file in params.files {
|
||||
@ -228,7 +228,7 @@ where
|
||||
|
||||
async fn do_did_close(&self, params: DidCloseTextDocumentParams) {
|
||||
self.client()
|
||||
.log_message(MessageType::INFO, format!("document closed: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("document closed: {params:?}"))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
@ -13,6 +13,7 @@ use std::{
|
||||
use dashmap::DashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower_lsp::{
|
||||
LanguageServer,
|
||||
jsonrpc::{Error, Result},
|
||||
lsp_types::{
|
||||
CreateFilesParams, DeleteFilesParams, Diagnostic, DidChangeConfigurationParams, DidChangeTextDocumentParams,
|
||||
@ -22,7 +23,6 @@ use tower_lsp::{
|
||||
TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
|
||||
WorkspaceServerCapabilities,
|
||||
},
|
||||
LanguageServer,
|
||||
};
|
||||
|
||||
use crate::lsp::{
|
||||
@ -198,7 +198,7 @@ impl Backend {
|
||||
.map_err(|err| Error {
|
||||
code: tower_lsp::jsonrpc::ErrorCode::from(69),
|
||||
data: None,
|
||||
message: Cow::from(format!("Failed to get completions from zoo api: {}", err)),
|
||||
message: Cow::from(format!("Failed to get completions from zoo api: {err}")),
|
||||
})?;
|
||||
Ok(resp.completions)
|
||||
}
|
||||
@ -209,7 +209,7 @@ impl Backend {
|
||||
let mut lock = copy.write().map_err(|err| Error {
|
||||
code: tower_lsp::jsonrpc::ErrorCode::from(69),
|
||||
data: None,
|
||||
message: Cow::from(format!("Failed lock: {}", err)),
|
||||
message: Cow::from(format!("Failed lock: {err}")),
|
||||
})?;
|
||||
*lock = params;
|
||||
Ok(Success::new(true))
|
||||
@ -254,7 +254,7 @@ impl Backend {
|
||||
.map_err(|err| Error {
|
||||
code: tower_lsp::jsonrpc::ErrorCode::from(69),
|
||||
data: None,
|
||||
message: Cow::from(format!("Failed to get completions: {}", err)),
|
||||
message: Cow::from(format!("Failed to get completions: {err}")),
|
||||
})?;
|
||||
#[cfg(not(test))]
|
||||
let mut completion_list = vec![];
|
||||
@ -294,7 +294,7 @@ part001 = cube(pos = [0,0], scale = 20)
|
||||
|
||||
pub async fn accept_completion(&self, params: CopilotAcceptCompletionParams) {
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("Accepted completions: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("Accepted completions: {params:?}"))
|
||||
.await;
|
||||
|
||||
// Get the original telemetry data.
|
||||
@ -303,7 +303,7 @@ part001 = cube(pos = [0,0], scale = 20)
|
||||
};
|
||||
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("Original telemetry: {:?}", original))
|
||||
.log_message(MessageType::INFO, format!("Original telemetry: {original:?}"))
|
||||
.await;
|
||||
|
||||
// TODO: Send the telemetry data to the zoo api.
|
||||
@ -311,7 +311,7 @@ part001 = cube(pos = [0,0], scale = 20)
|
||||
|
||||
pub async fn reject_completions(&self, params: CopilotRejectCompletionParams) {
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("Rejected completions: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("Rejected completions: {params:?}"))
|
||||
.await;
|
||||
|
||||
// Get the original telemetry data.
|
||||
@ -323,7 +323,7 @@ part001 = cube(pos = [0,0], scale = 20)
|
||||
}
|
||||
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("Original telemetry: {:?}", originals))
|
||||
.log_message(MessageType::INFO, format!("Original telemetry: {originals:?}"))
|
||||
.await;
|
||||
|
||||
// TODO: Send the telemetry data to the zoo api.
|
||||
|
@ -85,7 +85,7 @@ impl CopilotCompletionResponse {
|
||||
impl CopilotCyclingCompletion {
|
||||
pub fn new(text: String, line_before: String, position: CopilotPosition) -> Self {
|
||||
let display_text = text.clone();
|
||||
let text = format!("{}{}", line_before, text);
|
||||
let text = format!("{line_before}{text}");
|
||||
let end_char = text.find('\n').unwrap_or(text.len()) as u32;
|
||||
Self {
|
||||
uuid: uuid::Uuid::new_v4(),
|
||||
|
@ -3,7 +3,7 @@ use std::collections::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower_lsp::lsp_types::Range as LspRange;
|
||||
|
||||
use crate::{parsing::ast::types::*, SourceRange};
|
||||
use crate::{SourceRange, parsing::ast::types::*};
|
||||
|
||||
/// Describes information about a hover.
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||
|
@ -15,6 +15,7 @@ use dashmap::DashMap;
|
||||
use sha2::Digest;
|
||||
use tokio::sync::RwLock;
|
||||
use tower_lsp::{
|
||||
Client, LanguageServer,
|
||||
jsonrpc::Result as RpcResult,
|
||||
lsp_types::{
|
||||
CodeAction, CodeActionKind, CodeActionOptions, CodeActionOrCommand, CodeActionParams,
|
||||
@ -37,10 +38,10 @@ use tower_lsp::{
|
||||
TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, TextEdit, WorkDoneProgressOptions,
|
||||
WorkspaceEdit, WorkspaceFolder, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
|
||||
},
|
||||
Client, LanguageServer,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
ModuleId, Program, SourceRange,
|
||||
docs::kcl_doc::ModData,
|
||||
errors::LspSuggestion,
|
||||
exec::KclValue,
|
||||
@ -51,11 +52,10 @@ use crate::{
|
||||
util::IntoDiagnostic,
|
||||
},
|
||||
parsing::{
|
||||
PIPE_OPERATOR,
|
||||
ast::types::{Expr, VariableKind},
|
||||
token::TokenStream,
|
||||
PIPE_OPERATOR,
|
||||
},
|
||||
ModuleId, Program, SourceRange,
|
||||
};
|
||||
|
||||
pub mod custom_notifications;
|
||||
@ -290,10 +290,9 @@ impl crate::lsp::backend::Backend for Backend {
|
||||
};
|
||||
|
||||
// Get the previous tokens.
|
||||
let tokens_changed = if let Some(previous_tokens) = self.token_map.get(&filename) {
|
||||
*previous_tokens != tokens
|
||||
} else {
|
||||
true
|
||||
let tokens_changed = match self.token_map.get(&filename) {
|
||||
Some(previous_tokens) => *previous_tokens != tokens,
|
||||
_ => true,
|
||||
};
|
||||
|
||||
let had_diagnostics = self.has_diagnostics(params.uri.as_ref()).await;
|
||||
@ -424,7 +423,7 @@ impl Backend {
|
||||
self.client
|
||||
.log_message(
|
||||
MessageType::ERROR,
|
||||
format!("token type `{:?}` not accounted for", token_type),
|
||||
format!("token type `{token_type:?}` not accounted for"),
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
@ -436,7 +435,8 @@ impl Backend {
|
||||
|
||||
// Calculate the token modifiers.
|
||||
// Get the value at the current position.
|
||||
let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(params.uri.as_str()) {
|
||||
let token_modifiers_bitset = match self.ast_map.get(params.uri.as_str()) {
|
||||
Some(ast) => {
|
||||
let token_index = Arc::new(Mutex::new(token_type_index));
|
||||
let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
|
||||
crate::walk::walk(&ast.ast, |node: crate::walk::Node| {
|
||||
@ -538,17 +538,18 @@ impl Backend {
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let t = if let Ok(guard) = token_index.lock() { *guard } else { 0 };
|
||||
let t = match token_index.lock() {
|
||||
Ok(guard) => *guard,
|
||||
_ => 0,
|
||||
};
|
||||
token_type_index = t;
|
||||
|
||||
let m = if let Ok(guard) = modifier_index.lock() {
|
||||
*guard
|
||||
} else {
|
||||
0
|
||||
};
|
||||
m
|
||||
} else {
|
||||
0
|
||||
match modifier_index.lock() {
|
||||
Ok(guard) => *guard,
|
||||
_ => 0,
|
||||
}
|
||||
}
|
||||
_ => 0,
|
||||
};
|
||||
|
||||
// We need to check if we are on the last token of the line.
|
||||
@ -652,11 +653,14 @@ impl Backend {
|
||||
.await;
|
||||
}
|
||||
|
||||
let mut items = if let Some(items) = self.diagnostics_map.get(params.uri.as_str()) {
|
||||
let mut items = match self.diagnostics_map.get(params.uri.as_str()) {
|
||||
Some(items) => {
|
||||
// TODO: Would be awesome to fix the clone here.
|
||||
items.clone()
|
||||
} else {
|
||||
}
|
||||
_ => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
for diagnostic in diagnostics {
|
||||
@ -768,7 +772,7 @@ impl Backend {
|
||||
// Read hash digest and consume hasher
|
||||
let result = hasher.finalize();
|
||||
// Get the hash as a string.
|
||||
let user_id_hash = format!("{:x}", result);
|
||||
let user_id_hash = format!("{result:x}");
|
||||
|
||||
// Get the workspace folders.
|
||||
// The key of the workspace folder is the project name.
|
||||
@ -866,7 +870,7 @@ impl Backend {
|
||||
impl LanguageServer for Backend {
|
||||
async fn initialize(&self, params: InitializeParams) -> RpcResult<InitializeResult> {
|
||||
self.client
|
||||
.log_message(MessageType::INFO, format!("initialize: {:?}", params))
|
||||
.log_message(MessageType::INFO, format!("initialize: {params:?}"))
|
||||
.await;
|
||||
|
||||
Ok(InitializeResult {
|
||||
@ -1006,7 +1010,7 @@ impl LanguageServer for Backend {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
if let Err(err) = self.send_telemetry().await {
|
||||
self.client
|
||||
.log_message(MessageType::WARNING, format!("failed to send telemetry: {}", err))
|
||||
.log_message(MessageType::WARNING, format!("failed to send telemetry: {err}"))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
@ -1090,7 +1094,7 @@ impl LanguageServer for Backend {
|
||||
Ok(Some(LspHover {
|
||||
contents: HoverContents::Markup(MarkupContent {
|
||||
kind: MarkupKind::Markdown,
|
||||
value: format!("```\n{}{}\n```\n\n{}", name, sig, docs),
|
||||
value: format!("```\n{name}{sig}\n```\n\n{docs}"),
|
||||
}),
|
||||
range: Some(range),
|
||||
}))
|
||||
@ -1118,7 +1122,7 @@ impl LanguageServer for Backend {
|
||||
Ok(Some(LspHover {
|
||||
contents: HoverContents::Markup(MarkupContent {
|
||||
kind: MarkupKind::Markdown,
|
||||
value: format!("```\n{}\n```\n\n{}", name, docs),
|
||||
value: format!("```\n{name}\n```\n\n{docs}"),
|
||||
}),
|
||||
range: Some(range),
|
||||
}))
|
||||
@ -1153,17 +1157,17 @@ impl LanguageServer for Backend {
|
||||
} => Ok(Some(LspHover {
|
||||
contents: HoverContents::Markup(MarkupContent {
|
||||
kind: MarkupKind::Markdown,
|
||||
value: format!("```\n{}: {}\n```", name, ty),
|
||||
value: format!("```\n{name}: {ty}\n```"),
|
||||
}),
|
||||
range: Some(range),
|
||||
})),
|
||||
Hover::Variable { name, ty: None, range } => Ok(with_cached_var(&name, |value| {
|
||||
let mut text: String = format!("```\n{}", name);
|
||||
let mut text: String = format!("```\n{name}");
|
||||
if let Some(ty) = value.principal_type() {
|
||||
text.push_str(&format!(": {}", ty.human_friendly_type()));
|
||||
}
|
||||
if let Some(v) = value.value_str() {
|
||||
text.push_str(&format!(" = {}", v));
|
||||
text.push_str(&format!(" = {v}"));
|
||||
}
|
||||
text.push_str("\n```");
|
||||
|
||||
|
@ -13,8 +13,8 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, DiagnosticTag};
|
||||
pub use util::IntoDiagnostic;
|
||||
|
||||
use crate::{
|
||||
errors::{Severity, Tag},
|
||||
CompilationError,
|
||||
errors::{Severity, Tag},
|
||||
};
|
||||
|
||||
impl IntoDiagnostic for CompilationError {
|
||||
|
@ -2,18 +2,18 @@ use std::collections::{BTreeMap, HashMap};
|
||||
|
||||
use pretty_assertions::assert_eq;
|
||||
use tower_lsp::{
|
||||
LanguageServer,
|
||||
lsp_types::{
|
||||
CodeActionKind, CodeActionOrCommand, Diagnostic, PrepareRenameResponse, SemanticTokenModifier,
|
||||
SemanticTokenType, TextEdit, WorkspaceEdit,
|
||||
},
|
||||
LanguageServer,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
SourceRange,
|
||||
errors::{LspSuggestion, Suggestion},
|
||||
lsp::test_util::{copilot_lsp_server, kcl_lsp_server},
|
||||
parsing::ast::types::{Node, Program},
|
||||
SourceRange,
|
||||
};
|
||||
|
||||
#[track_caller]
|
||||
@ -276,11 +276,7 @@ async fn test_updating_kcl_lsp_files() {
|
||||
assert_eq!(server.code_map.len(), 11);
|
||||
// Just make sure that one of the current files read from disk is accurate.
|
||||
assert_eq!(
|
||||
server
|
||||
.code_map
|
||||
.get(&format!("{}/util.rs", string_path))
|
||||
.unwrap()
|
||||
.clone(),
|
||||
server.code_map.get(&format!("{string_path}/util.rs")).unwrap().clone(),
|
||||
include_str!("util.rs").as_bytes()
|
||||
);
|
||||
}
|
||||
@ -633,7 +629,7 @@ async fn test_kcl_lsp_create_zip() {
|
||||
}
|
||||
|
||||
assert_eq!(files.len(), 12);
|
||||
let util_path = format!("{}/util.rs", string_path).replace("file://", "");
|
||||
let util_path = format!("{string_path}/util.rs").replace("file://", "");
|
||||
assert!(files.contains_key(&util_path));
|
||||
assert_eq!(files.get("/test.kcl"), Some(&4));
|
||||
}
|
||||
|
@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
|
||||
use crate::{
|
||||
errors::{KclError, KclErrorDetails},
|
||||
exec::KclValue,
|
||||
execution::{typed_path::TypedPath, EnvironmentRef, ModuleArtifactState, PreImportedGeometry},
|
||||
execution::{EnvironmentRef, ModuleArtifactState, PreImportedGeometry, typed_path::TypedPath},
|
||||
fs::{FileManager, FileSystem},
|
||||
parsing::ast::types::{ImportPath, Node, Program},
|
||||
source_range::SourceRange,
|
||||
@ -73,13 +73,13 @@ impl ModuleLoader {
|
||||
}
|
||||
|
||||
pub(crate) fn enter_module(&mut self, path: &ModulePath) {
|
||||
if let ModulePath::Local { value: ref path } = path {
|
||||
if let ModulePath::Local { value: path } = path {
|
||||
self.import_stack.push(path.clone());
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn leave_module(&mut self, path: &ModulePath) {
|
||||
if let ModulePath::Local { value: ref path } = path {
|
||||
if let ModulePath::Local { value: path } = path {
|
||||
let popped = self.import_stack.pop().unwrap();
|
||||
assert_eq!(path, &popped);
|
||||
}
|
||||
|
@ -2,8 +2,8 @@ pub(crate) mod digest;
|
||||
pub mod types;
|
||||
|
||||
use crate::{
|
||||
parsing::ast::types::{BinaryPart, BodyItem, Expr, LiteralIdentifier},
|
||||
ModuleId,
|
||||
parsing::ast::types::{BinaryPart, BodyItem, Expr, LiteralIdentifier},
|
||||
};
|
||||
|
||||
impl BodyItem {
|
||||
|
@ -25,15 +25,14 @@ pub use crate::parsing::ast::types::{
|
||||
none::KclNone,
|
||||
};
|
||||
use crate::{
|
||||
ModuleId, TypedPath,
|
||||
errors::KclError,
|
||||
execution::{
|
||||
annotations,
|
||||
KclValue, Metadata, TagIdentifier, annotations,
|
||||
types::{ArrayLen, UnitAngle, UnitLen},
|
||||
KclValue, Metadata, TagIdentifier,
|
||||
},
|
||||
parsing::{ast::digest::Digest, token::NumericSuffix, PIPE_OPERATOR},
|
||||
parsing::{PIPE_OPERATOR, ast::digest::Digest, token::NumericSuffix},
|
||||
source_range::SourceRange,
|
||||
ModuleId, TypedPath,
|
||||
};
|
||||
|
||||
mod condition;
|
||||
@ -72,18 +71,18 @@ impl<T: JsonSchema> schemars::JsonSchema for Node<T> {
|
||||
T::schema_name()
|
||||
}
|
||||
|
||||
fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
let mut child = T::json_schema(gen).into_object();
|
||||
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
|
||||
let mut child = T::json_schema(r#gen).into_object();
|
||||
// We want to add the start and end fields to the schema.
|
||||
// Ideally we would add _any_ extra fields from the Node type automatically
|
||||
// but this is a bit hard since this isn't a macro.
|
||||
let Some(ref mut object) = &mut child.object else {
|
||||
let Some(object) = &mut child.object else {
|
||||
// This should never happen. But it will panic at compile time of docs if it does.
|
||||
// Which is better than runtime.
|
||||
panic!("Expected object schema for {}", T::schema_name());
|
||||
};
|
||||
object.properties.insert("start".to_string(), usize::json_schema(gen));
|
||||
object.properties.insert("end".to_string(), usize::json_schema(gen));
|
||||
object.properties.insert("start".to_string(), usize::json_schema(r#gen));
|
||||
object.properties.insert("end".to_string(), usize::json_schema(r#gen));
|
||||
|
||||
schemars::schema::Schema::Object(child.clone())
|
||||
}
|
||||
@ -681,7 +680,7 @@ impl Program {
|
||||
break;
|
||||
}
|
||||
}
|
||||
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||
BodyItem::VariableDeclaration(variable_declaration) => {
|
||||
if let Some(var_old_name) = variable_declaration.rename_symbol(new_name, pos) {
|
||||
old_name = Some(var_old_name);
|
||||
break;
|
||||
@ -705,18 +704,16 @@ impl Program {
|
||||
// Recurse over the item.
|
||||
let mut value = match item {
|
||||
BodyItem::ImportStatement(_) => None, // TODO
|
||||
BodyItem::ExpressionStatement(ref mut expression_statement) => {
|
||||
Some(&mut expression_statement.expression)
|
||||
}
|
||||
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||
BodyItem::ExpressionStatement(expression_statement) => Some(&mut expression_statement.expression),
|
||||
BodyItem::VariableDeclaration(variable_declaration) => {
|
||||
variable_declaration.get_mut_expr_for_position(pos)
|
||||
}
|
||||
BodyItem::TypeDeclaration(_) => None,
|
||||
BodyItem::ReturnStatement(ref mut return_statement) => Some(&mut return_statement.argument),
|
||||
BodyItem::ReturnStatement(return_statement) => Some(&mut return_statement.argument),
|
||||
};
|
||||
|
||||
// Check if we have a function expression.
|
||||
if let Some(Expr::FunctionExpression(ref mut function_expression)) = &mut value {
|
||||
if let Some(Expr::FunctionExpression(function_expression)) = &mut value {
|
||||
// Check if the params to the function expression contain the position.
|
||||
for param in &mut function_expression.params {
|
||||
let param_source_range: SourceRange = (¶m.identifier).into();
|
||||
@ -764,7 +761,7 @@ impl Program {
|
||||
BodyItem::ExpressionStatement(_) => {
|
||||
continue;
|
||||
}
|
||||
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||
BodyItem::VariableDeclaration(variable_declaration) => {
|
||||
if variable_declaration.declaration.id.name == name {
|
||||
variable_declaration.declaration = declarator;
|
||||
return;
|
||||
@ -783,14 +780,14 @@ impl Program {
|
||||
for item in &mut self.body {
|
||||
match item {
|
||||
BodyItem::ImportStatement(_) => {} // TODO
|
||||
BodyItem::ExpressionStatement(ref mut expression_statement) => expression_statement
|
||||
BodyItem::ExpressionStatement(expression_statement) => expression_statement
|
||||
.expression
|
||||
.replace_value(source_range, new_value.clone()),
|
||||
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||
BodyItem::VariableDeclaration(variable_declaration) => {
|
||||
variable_declaration.replace_value(source_range, new_value.clone())
|
||||
}
|
||||
BodyItem::TypeDeclaration(_) => {}
|
||||
BodyItem::ReturnStatement(ref mut return_statement) => {
|
||||
BodyItem::ReturnStatement(return_statement) => {
|
||||
return_statement.argument.replace_value(source_range, new_value.clone())
|
||||
}
|
||||
}
|
||||
@ -1040,18 +1037,18 @@ impl Expr {
|
||||
}
|
||||
|
||||
match self {
|
||||
Expr::BinaryExpression(ref mut bin_exp) => bin_exp.replace_value(source_range, new_value),
|
||||
Expr::ArrayExpression(ref mut array_exp) => array_exp.replace_value(source_range, new_value),
|
||||
Expr::ArrayRangeExpression(ref mut array_range) => array_range.replace_value(source_range, new_value),
|
||||
Expr::ObjectExpression(ref mut obj_exp) => obj_exp.replace_value(source_range, new_value),
|
||||
Expr::BinaryExpression(bin_exp) => bin_exp.replace_value(source_range, new_value),
|
||||
Expr::ArrayExpression(array_exp) => array_exp.replace_value(source_range, new_value),
|
||||
Expr::ArrayRangeExpression(array_range) => array_range.replace_value(source_range, new_value),
|
||||
Expr::ObjectExpression(obj_exp) => obj_exp.replace_value(source_range, new_value),
|
||||
Expr::MemberExpression(_) => {}
|
||||
Expr::Literal(_) => {}
|
||||
Expr::FunctionExpression(ref mut func_exp) => func_exp.replace_value(source_range, new_value),
|
||||
Expr::CallExpressionKw(ref mut call_exp) => call_exp.replace_value(source_range, new_value),
|
||||
Expr::FunctionExpression(func_exp) => func_exp.replace_value(source_range, new_value),
|
||||
Expr::CallExpressionKw(call_exp) => call_exp.replace_value(source_range, new_value),
|
||||
Expr::Name(_) => {}
|
||||
Expr::TagDeclarator(_) => {}
|
||||
Expr::PipeExpression(ref mut pipe_exp) => pipe_exp.replace_value(source_range, new_value),
|
||||
Expr::UnaryExpression(ref mut unary_exp) => unary_exp.replace_value(source_range, new_value),
|
||||
Expr::PipeExpression(pipe_exp) => pipe_exp.replace_value(source_range, new_value),
|
||||
Expr::UnaryExpression(unary_exp) => unary_exp.replace_value(source_range, new_value),
|
||||
Expr::IfExpression(_) => {}
|
||||
Expr::PipeSubstitution(_) => {}
|
||||
Expr::LabelledExpression(expr) => expr.expr.replace_value(source_range, new_value),
|
||||
@ -1113,25 +1110,19 @@ impl Expr {
|
||||
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||
match self {
|
||||
Expr::Literal(_literal) => {}
|
||||
Expr::Name(ref mut identifier) => identifier.rename(old_name, new_name),
|
||||
Expr::TagDeclarator(ref mut tag) => tag.rename(old_name, new_name),
|
||||
Expr::BinaryExpression(ref mut binary_expression) => {
|
||||
binary_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
Expr::Name(identifier) => identifier.rename(old_name, new_name),
|
||||
Expr::TagDeclarator(tag) => tag.rename(old_name, new_name),
|
||||
Expr::BinaryExpression(binary_expression) => binary_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::FunctionExpression(_function_identifier) => {}
|
||||
Expr::CallExpressionKw(ref mut call_expression) => call_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::PipeExpression(ref mut pipe_expression) => pipe_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::CallExpressionKw(call_expression) => call_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::PipeExpression(pipe_expression) => pipe_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::PipeSubstitution(_) => {}
|
||||
Expr::ArrayExpression(ref mut array_expression) => array_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::ArrayRangeExpression(ref mut array_range) => array_range.rename_identifiers(old_name, new_name),
|
||||
Expr::ObjectExpression(ref mut object_expression) => {
|
||||
object_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
Expr::MemberExpression(ref mut member_expression) => {
|
||||
member_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
Expr::UnaryExpression(ref mut unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::IfExpression(ref mut expr) => expr.rename_identifiers(old_name, new_name),
|
||||
Expr::ArrayExpression(array_expression) => array_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::ArrayRangeExpression(array_range) => array_range.rename_identifiers(old_name, new_name),
|
||||
Expr::ObjectExpression(object_expression) => object_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::MemberExpression(member_expression) => member_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::UnaryExpression(unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
|
||||
Expr::IfExpression(expr) => expr.rename_identifiers(old_name, new_name),
|
||||
Expr::LabelledExpression(expr) => expr.expr.rename_identifiers(old_name, new_name),
|
||||
Expr::AscribedExpression(expr) => expr.expr.rename_identifiers(old_name, new_name),
|
||||
Expr::None(_) => {}
|
||||
@ -1325,15 +1316,9 @@ impl BinaryPart {
|
||||
match self {
|
||||
BinaryPart::Literal(_) => {}
|
||||
BinaryPart::Name(_) => {}
|
||||
BinaryPart::BinaryExpression(ref mut binary_expression) => {
|
||||
binary_expression.replace_value(source_range, new_value)
|
||||
}
|
||||
BinaryPart::CallExpressionKw(ref mut call_expression) => {
|
||||
call_expression.replace_value(source_range, new_value)
|
||||
}
|
||||
BinaryPart::UnaryExpression(ref mut unary_expression) => {
|
||||
unary_expression.replace_value(source_range, new_value)
|
||||
}
|
||||
BinaryPart::BinaryExpression(binary_expression) => binary_expression.replace_value(source_range, new_value),
|
||||
BinaryPart::CallExpressionKw(call_expression) => call_expression.replace_value(source_range, new_value),
|
||||
BinaryPart::UnaryExpression(unary_expression) => unary_expression.replace_value(source_range, new_value),
|
||||
BinaryPart::MemberExpression(_) => {}
|
||||
BinaryPart::IfExpression(e) => e.replace_value(source_range, new_value),
|
||||
BinaryPart::AscribedExpression(e) => e.expr.replace_value(source_range, new_value),
|
||||
@ -1370,21 +1355,13 @@ impl BinaryPart {
|
||||
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||
match self {
|
||||
BinaryPart::Literal(_literal) => {}
|
||||
BinaryPart::Name(ref mut identifier) => identifier.rename(old_name, new_name),
|
||||
BinaryPart::BinaryExpression(ref mut binary_expression) => {
|
||||
binary_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
BinaryPart::CallExpressionKw(ref mut call_expression) => {
|
||||
call_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
BinaryPart::UnaryExpression(ref mut unary_expression) => {
|
||||
unary_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
BinaryPart::MemberExpression(ref mut member_expression) => {
|
||||
member_expression.rename_identifiers(old_name, new_name)
|
||||
}
|
||||
BinaryPart::IfExpression(ref mut if_expression) => if_expression.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::AscribedExpression(ref mut e) => e.expr.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::Name(identifier) => identifier.rename(old_name, new_name),
|
||||
BinaryPart::BinaryExpression(binary_expression) => binary_expression.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::CallExpressionKw(call_expression) => call_expression.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::UnaryExpression(unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::MemberExpression(member_expression) => member_expression.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::IfExpression(if_expression) => if_expression.rename_identifiers(old_name, new_name),
|
||||
BinaryPart::AscribedExpression(e) => e.expr.rename_identifiers(old_name, new_name),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2824,7 +2801,7 @@ impl MemberExpression {
|
||||
self.object.rename_identifiers(old_name, new_name);
|
||||
|
||||
match &mut self.property {
|
||||
LiteralIdentifier::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
|
||||
LiteralIdentifier::Identifier(identifier) => identifier.rename(old_name, new_name),
|
||||
LiteralIdentifier::Literal(_) => {}
|
||||
}
|
||||
}
|
||||
@ -3312,7 +3289,7 @@ impl Type {
|
||||
.map(|t| t.human_friendly_type())
|
||||
.collect::<Vec<_>>()
|
||||
.join(" or "),
|
||||
Type::Object { .. } => format!("an object with fields `{}`", self),
|
||||
Type::Object { .. } => format!("an object with fields `{self}`"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -3469,7 +3446,11 @@ pub struct RequiredParamAfterOptionalParam(pub Box<Parameter>);
|
||||
|
||||
impl std::fmt::Display for RequiredParamAfterOptionalParam {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "KCL functions must declare any optional parameters after all the required parameters. But your required parameter {} is _after_ an optional parameter. You must move it to before the optional parameters instead.", self.0.identifier.name)
|
||||
write!(
|
||||
f,
|
||||
"KCL functions must declare any optional parameters after all the required parameters. But your required parameter {} is _after_ an optional parameter. You must move it to before the optional parameters instead.",
|
||||
self.0.identifier.name
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3,8 +3,8 @@
|
||||
|
||||
use super::CompilationError;
|
||||
use crate::{
|
||||
parsing::ast::types::{BinaryExpression, BinaryOperator, BinaryPart, Node},
|
||||
SourceRange,
|
||||
parsing::ast::types::{BinaryExpression, BinaryOperator, BinaryPart, Node},
|
||||
};
|
||||
|
||||
/// Parses a list of tokens (in infix order, i.e. as the user typed them)
|
||||
@ -127,11 +127,11 @@ impl From<BinaryOperator> for BinaryExpressionToken {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
ModuleId,
|
||||
parsing::{
|
||||
ast::types::{Literal, LiteralValue},
|
||||
token::NumericSuffix,
|
||||
},
|
||||
ModuleId,
|
||||
};
|
||||
|
||||
#[test]
|
||||
|
@ -1,11 +1,11 @@
|
||||
use crate::{
|
||||
ModuleId,
|
||||
errors::{CompilationError, KclError, KclErrorDetails},
|
||||
parsing::{
|
||||
ast::types::{Node, Program},
|
||||
token::TokenStream,
|
||||
},
|
||||
source_range::SourceRange,
|
||||
ModuleId,
|
||||
};
|
||||
|
||||
pub(crate) mod ast;
|
||||
@ -18,7 +18,7 @@ pub const PIPE_OPERATOR: &str = "|>";
|
||||
|
||||
// `?` like behavior for `Result`s to return a ParseResult if there is an error.
|
||||
macro_rules! pr_try {
|
||||
($e: expr) => {
|
||||
($e: expr_2021) => {
|
||||
match $e {
|
||||
Ok(a) => a,
|
||||
Err(e) => return e.into(),
|
||||
@ -187,7 +187,7 @@ pub fn deprecation(s: &str, kind: DeprecationKind) -> Option<&'static str> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
macro_rules! parse_and_lex {
|
||||
($func_name:ident, $test_kcl_program:expr) => {
|
||||
($func_name:ident, $test_kcl_program:expr_2021) => {
|
||||
#[test]
|
||||
fn $func_name() {
|
||||
let _ = crate::parsing::top_level_parse($test_kcl_program);
|
||||
|
@ -14,14 +14,16 @@ use winnow::{
|
||||
};
|
||||
|
||||
use super::{
|
||||
DeprecationKind,
|
||||
ast::types::{AscribedExpression, ImportPath, LabelledExpression},
|
||||
token::{NumericSuffix, RESERVED_WORDS},
|
||||
DeprecationKind,
|
||||
};
|
||||
use crate::{
|
||||
IMPORT_FILE_EXTENSIONS, SourceRange, TypedPath,
|
||||
errors::{CompilationError, Severity, Tag},
|
||||
execution::types::ArrayLen,
|
||||
parsing::{
|
||||
PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR,
|
||||
ast::types::{
|
||||
Annotation, ArrayExpression, ArrayRangeExpression, BinaryExpression, BinaryOperator, BinaryPart, BodyItem,
|
||||
BoxNode, CallExpressionKw, CommentStyle, DefaultParamVal, ElseIf, Expr, ExpressionStatement,
|
||||
@ -33,9 +35,7 @@ use crate::{
|
||||
},
|
||||
math::BinaryExpressionToken,
|
||||
token::{Token, TokenSlice, TokenType},
|
||||
PIPE_OPERATOR, PIPE_SUBSTITUTION_OPERATOR,
|
||||
},
|
||||
SourceRange, TypedPath, IMPORT_FILE_EXTENSIONS,
|
||||
};
|
||||
|
||||
thread_local! {
|
||||
@ -602,7 +602,7 @@ fn binary_operator(i: &mut TokenSlice) -> ModalResult<BinaryOperator> {
|
||||
return Err(CompilationError::fatal(
|
||||
token.as_source_range(),
|
||||
format!("{} is not a binary operator", token.value.as_str()),
|
||||
))
|
||||
));
|
||||
}
|
||||
};
|
||||
Ok(op)
|
||||
@ -726,7 +726,7 @@ fn shebang(i: &mut TokenSlice) -> ModalResult<Node<Shebang>> {
|
||||
opt(whitespace).parse_next(i)?;
|
||||
|
||||
Ok(Node::new(
|
||||
Shebang::new(format!("#!{}", value)),
|
||||
Shebang::new(format!("#!{value}")),
|
||||
0,
|
||||
tokens.last().unwrap().end,
|
||||
tokens.first().unwrap().module_id,
|
||||
@ -1926,7 +1926,7 @@ fn validate_path_string(path_string: String, var_name: bool, path_range: SourceR
|
||||
return Err(ErrMode::Cut(
|
||||
CompilationError::fatal(
|
||||
path_range,
|
||||
format!("Invalid import path for import from std: {}.", path_string),
|
||||
format!("Invalid import path for import from std: {path_string}."),
|
||||
)
|
||||
.into(),
|
||||
));
|
||||
@ -1938,7 +1938,10 @@ fn validate_path_string(path_string: String, var_name: bool, path_range: SourceR
|
||||
if !IMPORT_FILE_EXTENSIONS.contains(&extn.to_string_lossy().to_string()) {
|
||||
ParseContext::warn(CompilationError::err(
|
||||
path_range,
|
||||
format!("unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}", IMPORT_FILE_EXTENSIONS.join(", ")),
|
||||
format!(
|
||||
"unsupported import path format. KCL files can be imported from the current project, CAD files with the following formats are supported: {}",
|
||||
IMPORT_FILE_EXTENSIONS.join(", ")
|
||||
),
|
||||
))
|
||||
}
|
||||
ImportPath::Foreign {
|
||||
@ -2210,7 +2213,7 @@ fn declaration(i: &mut TokenSlice) -> ModalResult<BoxNode<VariableDeclaration>>
|
||||
if matches!(val, Expr::FunctionExpression(_)) {
|
||||
return Err(CompilationError::fatal(
|
||||
SourceRange::new(start, dec_end, id.module_id),
|
||||
format!("Expected a `fn` variable kind, found: `{}`", kind),
|
||||
format!("Expected a `fn` variable kind, found: `{kind}`"),
|
||||
));
|
||||
}
|
||||
Ok(val)
|
||||
@ -3312,10 +3315,10 @@ fn fn_call_kw(i: &mut TokenSlice) -> ModalResult<Node<CallExpressionKw>> {
|
||||
ParseContext::warn(
|
||||
CompilationError::err(
|
||||
result.as_source_range(),
|
||||
format!("Calling `{}` is deprecated, prefer using `{}`.", callee_str, suggestion),
|
||||
format!("Calling `{callee_str}` is deprecated, prefer using `{suggestion}`."),
|
||||
)
|
||||
.with_suggestion(
|
||||
format!("Replace `{}` with `{}`", callee_str, suggestion),
|
||||
format!("Replace `{callee_str}` with `{suggestion}`"),
|
||||
suggestion,
|
||||
None,
|
||||
Tag::Deprecated,
|
||||
@ -3333,13 +3336,13 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
use crate::{
|
||||
parsing::ast::types::{BodyItem, Expr, VariableKind},
|
||||
ModuleId,
|
||||
parsing::ast::types::{BodyItem, Expr, VariableKind},
|
||||
};
|
||||
|
||||
fn assert_reserved(word: &str) {
|
||||
// Try to use it as a variable name.
|
||||
let code = format!(r#"{} = 0"#, word);
|
||||
let code = format!(r#"{word} = 0"#);
|
||||
let result = crate::parsing::top_level_parse(code.as_str());
|
||||
let err = &result.unwrap_errs().next().unwrap();
|
||||
// Which token causes the error may change. In "return = 0", for
|
||||
@ -5263,7 +5266,7 @@ mod snapshot_math_tests {
|
||||
// The macro takes a KCL program, ensures it tokenizes and parses, then compares
|
||||
// its parsed AST to a snapshot (kept in this repo in a file under snapshots/ dir)
|
||||
macro_rules! snapshot_test {
|
||||
($func_name:ident, $test_kcl_program:expr) => {
|
||||
($func_name:ident, $test_kcl_program:expr_2021) => {
|
||||
#[test]
|
||||
fn $func_name() {
|
||||
let module_id = crate::ModuleId::default();
|
||||
@ -5301,7 +5304,7 @@ mod snapshot_tests {
|
||||
// The macro takes a KCL program, ensures it tokenizes and parses, then compares
|
||||
// its parsed AST to a snapshot (kept in this repo in a file under snapshots/ dir)
|
||||
macro_rules! snapshot_test {
|
||||
($func_name:ident, $test_kcl_program:expr) => {
|
||||
($func_name:ident, $test_kcl_program:expr_2021) => {
|
||||
#[test]
|
||||
fn $func_name() {
|
||||
let module_id = crate::ModuleId::default();
|
||||
|
@ -16,10 +16,10 @@ use winnow::{
|
||||
};
|
||||
|
||||
use crate::{
|
||||
CompilationError, ModuleId,
|
||||
errors::KclError,
|
||||
parsing::ast::types::{ItemVisibility, VariableKind},
|
||||
source_range::SourceRange,
|
||||
CompilationError, ModuleId,
|
||||
};
|
||||
|
||||
mod tokeniser;
|
||||
@ -609,7 +609,7 @@ impl From<ParseError<Input<'_>, winnow::error::ContextError>> for KclError {
|
||||
// TODO: Add the Winnow parser context to the error.
|
||||
// See https://github.com/KittyCAD/modeling-app/issues/784
|
||||
KclError::new_lexical(crate::errors::KclErrorDetails::new(
|
||||
format!("found unknown token '{}'", bad_token),
|
||||
format!("found unknown token '{bad_token}'"),
|
||||
vec![SourceRange::new(offset, offset + 1, module_id)],
|
||||
))
|
||||
}
|
||||
|
@ -1,19 +1,19 @@
|
||||
use fnv::FnvHashMap;
|
||||
use lazy_static::lazy_static;
|
||||
use winnow::{
|
||||
LocatingSlice, Stateful,
|
||||
ascii::{digit1, multispace1},
|
||||
combinator::{alt, opt, peek, preceded, repeat},
|
||||
error::{ContextError, ParseError},
|
||||
prelude::*,
|
||||
stream::{Location, Stream},
|
||||
token::{any, none_of, take_till, take_until, take_while},
|
||||
LocatingSlice, Stateful,
|
||||
};
|
||||
|
||||
use super::TokenStream;
|
||||
use crate::{
|
||||
parsing::token::{Token, TokenType},
|
||||
ModuleId,
|
||||
parsing::token::{Token, TokenType},
|
||||
};
|
||||
|
||||
lazy_static! {
|
||||
|
@ -1,9 +1,9 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use schemars::{gen::SchemaGenerator, JsonSchema};
|
||||
use serde_json::{json, Value};
|
||||
use schemars::{JsonSchema, r#gen::SchemaGenerator};
|
||||
use serde_json::{Value, json};
|
||||
|
||||
use crate::settings::types::{project::ProjectConfiguration, Configuration};
|
||||
use crate::settings::types::{Configuration, project::ProjectConfiguration};
|
||||
|
||||
// Project settings example in TOML format
|
||||
const PROJECT_SETTINGS_EXAMPLE: &str = r#"[settings.app]
|
||||
@ -60,7 +60,7 @@ fn init_handlebars() -> handlebars::Handlebars<'static> {
|
||||
let pretty_options = array
|
||||
.iter()
|
||||
.filter_map(|v| v.as_str())
|
||||
.map(|s| format!("`{}`", s))
|
||||
.map(|s| format!("`{s}`"))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
out.write(&pretty_options)?;
|
||||
@ -89,17 +89,17 @@ fn init_handlebars() -> handlebars::Handlebars<'static> {
|
||||
Value::Null => out.write("None")?,
|
||||
Value::Bool(b) => out.write(&b.to_string())?,
|
||||
Value::Number(n) => out.write(&n.to_string())?,
|
||||
Value::String(s) => out.write(&format!("`{}`", s))?,
|
||||
Value::String(s) => out.write(&format!("`{s}`"))?,
|
||||
Value::Array(arr) => {
|
||||
let formatted = arr
|
||||
.iter()
|
||||
.map(|v| match v {
|
||||
Value::String(s) => format!("`{}`", s),
|
||||
_ => format!("{}", v),
|
||||
Value::String(s) => format!("`{s}`"),
|
||||
_ => format!("{v}"),
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
out.write(&format!("[{}]", formatted))?;
|
||||
out.write(&format!("[{formatted}]"))?;
|
||||
}
|
||||
Value::Object(_) => out.write("(complex default)")?,
|
||||
}
|
||||
@ -122,7 +122,7 @@ pub fn generate_settings_docs() {
|
||||
let hbs = init_handlebars();
|
||||
|
||||
// Generate project settings documentation
|
||||
let mut settings = schemars::gen::SchemaSettings::default();
|
||||
let mut settings = schemars::r#gen::SchemaSettings::default();
|
||||
settings.inline_subschemas = true;
|
||||
settings.meta_schema = None; // We don't need the meta schema for docs
|
||||
settings.option_nullable = false; // Important - makes Option fields show properly
|
||||
|
@ -716,13 +716,15 @@ enable_ssao = false
|
||||
|
||||
let result = color.validate();
|
||||
if let Ok(r) = result {
|
||||
panic!("Expected an error, but got success: {:?}", r);
|
||||
panic!("Expected an error, but got success: {r:?}");
|
||||
}
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("color: Validation error: color"));
|
||||
.contains("color: Validation error: color")
|
||||
);
|
||||
|
||||
let appearance = AppearanceSettings {
|
||||
theme: AppTheme::System,
|
||||
@ -730,13 +732,15 @@ enable_ssao = false
|
||||
};
|
||||
let result = appearance.validate();
|
||||
if let Ok(r) = result {
|
||||
panic!("Expected an error, but got success: {:?}", r);
|
||||
panic!("Expected an error, but got success: {r:?}");
|
||||
}
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("color: Validation error: color"));
|
||||
.contains("color: Validation error: color")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -746,13 +750,15 @@ color = 1567.4"#;
|
||||
|
||||
let result = Configuration::parse_and_validate(settings_file);
|
||||
if let Ok(r) = result {
|
||||
panic!("Expected an error, but got success: {:?}", r);
|
||||
panic!("Expected an error, but got success: {r:?}");
|
||||
}
|
||||
assert!(result.is_err());
|
||||
|
||||
assert!(result
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("color: Validation error: color"));
|
||||
.contains("color: Validation error: color")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
|
||||
use validator::Validate;
|
||||
|
||||
use crate::settings::types::{
|
||||
is_default, AppColor, CommandBarSettings, DefaultTrue, OnboardingStatus, TextEditorSettings, UnitLength,
|
||||
AppColor, CommandBarSettings, DefaultTrue, OnboardingStatus, TextEditorSettings, UnitLength, is_default,
|
||||
};
|
||||
|
||||
/// Project specific settings for the app.
|
||||
@ -203,14 +203,16 @@ color = 1567.4"#;
|
||||
|
||||
let result = ProjectConfiguration::parse_and_validate(settings_file);
|
||||
if let Ok(r) = result {
|
||||
panic!("Expected an error, but got success: {:?}", r);
|
||||
panic!("Expected an error, but got success: {r:?}");
|
||||
}
|
||||
assert!(result.is_err());
|
||||
|
||||
assert!(result
|
||||
assert!(
|
||||
result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("color: Validation error: color"));
|
||||
.contains("color: Validation error: color")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1,14 +1,14 @@
|
||||
use std::{
|
||||
panic::{catch_unwind, AssertUnwindSafe},
|
||||
panic::{AssertUnwindSafe, catch_unwind},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use crate::{
|
||||
ExecOutcome, ExecState, ExecutorContext, ModuleId,
|
||||
errors::KclError,
|
||||
execution::{EnvironmentRef, ModuleArtifactState},
|
||||
ExecOutcome, ExecState, ExecutorContext, ModuleId,
|
||||
};
|
||||
#[cfg(feature = "artifact-graph")]
|
||||
use crate::{
|
||||
@ -241,7 +241,10 @@ async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
        Ok((exec_state, ctx, env_ref, png, step)) => {
            let fail_path = test.output_dir.join("execution_error.snap");
            if std::fs::exists(&fail_path).unwrap() {
                panic!("This test case is expected to fail, but it passed. If this is intended, and the test should actually be passing now, please delete kcl-lib/{}", fail_path.to_string_lossy())
                panic!(
                    "This test case is expected to fail, but it passed. If this is intended, and the test should actually be passing now, please delete kcl-lib/{}",
                    fail_path.to_string_lossy()
                )
            }
            if render_to_png {
                twenty_twenty::assert_image(test.output_dir.join(RENDERED_MODEL_NAME), &png, 0.99);
@ -287,10 +290,13 @@ async fn execute_test(test: &Test, render_to_png: bool, export_step: bool) {
            let report = error.clone().into_miette_report_with_outputs(&input).unwrap();
            let report = miette::Report::new(report);
            if previously_passed {
                eprintln!("This test case failed, but it previously passed. If this is intended, and the test should actually be failing now, please delete kcl-lib/{} and other associated passing artifacts", ok_path.to_string_lossy());
                eprintln!(
                    "This test case failed, but it previously passed. If this is intended, and the test should actually be failing now, please delete kcl-lib/{} and other associated passing artifacts",
                    ok_path.to_string_lossy()
                );
                panic!("{report:?}");
            }
            let report = format!("{:?}", report);
            let report = format!("{report:?}");

            let err_result = catch_unwind(AssertUnwindSafe(|| {
                assert_snapshot(test, "Error from executing", || {
@ -1,7 +1,7 @@
//! Run all the KCL samples in the `kcl_samples` directory.
use std::{
    fs,
    panic::{catch_unwind, AssertUnwindSafe},
    panic::{AssertUnwindSafe, catch_unwind},
    path::{Path, PathBuf},
};

@ -86,7 +86,11 @@ fn test_after_engine_ensure_kcl_samples_manifest_etc() {
        .into_iter()
        .filter(|name| !input_names.contains(name))
        .collect::<Vec<_>>();
    assert!(missing.is_empty(), "Expected input kcl-samples for the following. If these are no longer tests, delete the expected output directories for them in {}: {missing:?}", OUTPUTS_DIR.to_string_lossy());
    assert!(
        missing.is_empty(),
        "Expected input kcl-samples for the following. If these are no longer tests, delete the expected output directories for them in {}: {missing:?}",
        OUTPUTS_DIR.to_string_lossy()
    );

    // We want to move the screenshot for the inputs to the public/kcl-samples
    // directory so that they can be used as inputs for the next run.
@ -189,7 +193,7 @@ fn kcl_samples_inputs() -> Vec<Test> {
        let entry_point = if main_kcl_path.exists() {
            main_kcl_path
        } else {
            panic!("No main.kcl found in {:?}", sub_dir);
            panic!("No main.kcl found in {sub_dir:?}");
        };
        tests.push(test(&dir_name_str, entry_point));
    }
@ -1,7 +1,7 @@
//! Standard library appearance.

use anyhow::Result;
use kcmc::{each_cmd as mcmd, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd};
use kittycad_modeling_cmds::{self as kcmc, shared::Color};
use regex::Regex;
use rgba_simple::Hex;
@ -10,8 +10,8 @@ use super::args::TyF64;
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{ArrayLen, RuntimeType},
        ExecState, KclValue, SolidOrImportedGeometry,
        types::{ArrayLen, RuntimeType},
    },
    std::Args,
};
@ -63,7 +63,7 @@ pub async fn appearance(exec_state: &mut ExecState, args: Args) -> Result<KclVal
    // Make sure the color if set is valid.
    if !HEX_REGEX.is_match(&color) {
        return Err(KclError::new_semantic(KclErrorDetails::new(
            format!("Invalid hex color (`{}`), try something like `#fff000`", color),
            format!("Invalid hex color (`{color}`), try something like `#fff000`"),
            vec![args.source_range],
        )));
    }
@ -7,12 +7,13 @@ use serde::Serialize;
use super::fillet::EdgeReference;
pub use crate::execution::fn_call::Args;
use crate::{
    ModuleId,
    errors::{KclError, KclErrorDetails},
    execution::{
        kcl_value::FunctionSource,
        types::{NumericType, PrimitiveType, RuntimeType, UnitAngle, UnitLen, UnitType},
        ExecState, ExtrudeSurface, Helix, KclObjectFields, KclValue, Metadata, PlaneInfo, Sketch, SketchSurface, Solid,
        TagIdentifier,
        kcl_value::FunctionSource,
        types::{NumericType, PrimitiveType, RuntimeType, UnitAngle, UnitLen, UnitType},
    },
    parsing::ast::types::TagNode,
    source_range::SourceRange,
@ -21,7 +22,6 @@ use crate::{
        sketch::FaceTag,
        sweep::SweepPath,
    },
    ModuleId,
};

const ERROR_STRING_SKETCH_TO_SOLID_HELPER: &str =
@ -97,8 +97,8 @@ impl JsonSchema for TyF64 {
        "TyF64".to_string()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
        gen.subschema_for::<f64>()
    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        r#gen.subschema_for::<f64>()
    }
}

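The `gen` → `r#gen` change is needed because `gen` becomes a reserved keyword in Rust edition 2024 (for `gen` blocks), so both the parameter name and the `schemars::gen` module path are spelled as raw identifiers; this assumes the crate is moving to edition 2024, which this hunk alone does not show. A minimal sketch of a raw identifier in use:

    // `r#gen` is the identifier `gen` written in raw form, so the same code
    // compiles both before and after `gen` becomes a keyword.
    fn describe(r#gen: &str) -> String {
        format!("generator: {}", r#gen)
    }

    fn main() {
        println!("{}", describe("schemars"));
    }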
@ -1,15 +1,15 @@
use indexmap::IndexMap;

use crate::{
    ExecutorContext,
    errors::{KclError, KclErrorDetails},
    execution::{
        ExecState,
        fn_call::{Arg, Args, KwArgs},
        kcl_value::{FunctionSource, KclValue},
        types::RuntimeType,
        ExecState,
    },
    source_range::SourceRange,
    ExecutorContext,
};

/// Apply a function to each element of an array.
@ -5,14 +5,14 @@ use anyhow::Result;
use super::args::TyF64;
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{types::RuntimeType, ExecState, KclValue},
    execution::{ExecState, KclValue, types::RuntimeType},
    std::Args,
};

async fn _assert(value: bool, message: &str, args: &Args) -> Result<(), KclError> {
    if !value {
        return Err(KclError::new_type(KclErrorDetails::new(
            format!("assert failed: {}", message),
            format!("assert failed: {message}"),
            vec![args.source_range],
        )));
    }
@ -58,4 +58,11 @@ impl Axis3dOrPoint3d {
            Axis3dOrPoint3d::Point(point) => point.clone(),
        }
    }

    pub fn axis_origin(&self) -> Option<[TyF64; 3]> {
        match self {
            Axis3dOrPoint3d::Axis { origin, .. } => Some(origin.clone()),
            Axis3dOrPoint3d::Point(..) => None,
        }
    }
}
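The new `axis_origin` helper only exposes the axis variant's origin as an `Option`, returning `None` for the point variant. A stand-in sketch of the same accessor pattern on a simplified enum (the real `Axis3dOrPoint3d` and `TyF64` carry more information than this):

    #[derive(Clone, Debug, PartialEq)]
    enum AxisOrPoint {
        Axis { origin: [f64; 3] },
        Point([f64; 3]),
    }

    impl AxisOrPoint {
        // Mirrors the shape of the `axis_origin` accessor added above.
        fn axis_origin(&self) -> Option<[f64; 3]> {
            match self {
                AxisOrPoint::Axis { origin } => Some(*origin),
                AxisOrPoint::Point(..) => None,
            }
        }
    }

    fn main() {
        let axis = AxisOrPoint::Axis { origin: [0.0, 0.0, 1.0] };
        assert_eq!(axis.axis_origin(), Some([0.0, 0.0, 1.0]));
        assert_eq!(AxisOrPoint::Point([1.0, 2.0, 3.0]).axis_origin(), None);
    }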
@ -1,18 +1,18 @@
//! Standard library chamfers.

use anyhow::Result;
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, shared::CutType, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit, shared::CutType};
use kittycad_modeling_cmds as kcmc;

use super::args::TyF64;
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::RuntimeType, ChamferSurface, EdgeCut, ExecState, ExtrudeSurface, GeoMeta, KclValue, ModelingCmdMeta,
        Solid,
        ChamferSurface, EdgeCut, ExecState, ExtrudeSurface, GeoMeta, KclValue, ModelingCmdMeta, Solid,
        types::RuntimeType,
    },
    parsing::ast::types::TagNode,
    std::{fillet::EdgeReference, Args},
    std::{Args, fillet::EdgeReference},
};

pub(crate) const DEFAULT_TOLERANCE: f64 = 0.0000001;
@ -4,10 +4,9 @@ use std::collections::HashMap;

use anyhow::Result;
use kcmc::{
    each_cmd as mcmd,
    ok_response::{output::EntityGetAllChildUuids, OkModelingCmdResponse},
    ModelingCmd, each_cmd as mcmd,
    ok_response::{OkModelingCmdResponse, output::EntityGetAllChildUuids},
    websocket::OkWebSocketResponseData,
    ModelingCmd,
};
use kittycad_modeling_cmds::{self as kcmc};

@ -15,11 +14,11 @@ use super::extrude::do_post_extrude;
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{NumericType, PrimitiveType, RuntimeType},
        ExecState, GeometryWithImportedGeometry, KclValue, ModelingCmdMeta, Sketch, Solid,
        types::{NumericType, PrimitiveType, RuntimeType},
    },
    parsing::ast::types::TagNode,
    std::{extrude::NamedCapTags, Args},
    std::{Args, extrude::NamedCapTags},
};

/// Clone a sketch or solid.
@ -91,7 +90,7 @@ async fn inner_clone(
        .await
        .map_err(|e| {
            KclError::new_internal(KclErrorDetails::new(
                format!("failed to fix tags and references: {:?}", e),
                format!("failed to fix tags and references: {e:?}"),
                vec![args.source_range],
            ))
        })?;
@ -320,10 +319,10 @@ clonedCube = clone(cube)
    assert_ne!(cube, cloned_cube);

    let KclValue::Sketch { value: cube } = cube else {
        panic!("Expected a sketch, got: {:?}", cube);
        panic!("Expected a sketch, got: {cube:?}");
    };
    let KclValue::Sketch { value: cloned_cube } = cloned_cube else {
        panic!("Expected a sketch, got: {:?}", cloned_cube);
        panic!("Expected a sketch, got: {cloned_cube:?}");
    };

    assert_ne!(cube.id, cloned_cube.id);
@ -369,10 +368,10 @@ clonedCube = clone(cube)
    assert_ne!(cube, cloned_cube);

    let KclValue::Solid { value: cube } = cube else {
        panic!("Expected a solid, got: {:?}", cube);
        panic!("Expected a solid, got: {cube:?}");
    };
    let KclValue::Solid { value: cloned_cube } = cloned_cube else {
        panic!("Expected a solid, got: {:?}", cloned_cube);
        panic!("Expected a solid, got: {cloned_cube:?}");
    };

    assert_ne!(cube.id, cloned_cube.id);
@ -427,10 +426,10 @@ clonedCube = clone(cube)
    assert_ne!(cube, cloned_cube);

    let KclValue::Sketch { value: cube } = cube else {
        panic!("Expected a sketch, got: {:?}", cube);
        panic!("Expected a sketch, got: {cube:?}");
    };
    let KclValue::Sketch { value: cloned_cube } = cloned_cube else {
        panic!("Expected a sketch, got: {:?}", cloned_cube);
        panic!("Expected a sketch, got: {cloned_cube:?}");
    };

    assert_ne!(cube.id, cloned_cube.id);
@ -483,10 +482,10 @@ clonedCube = clone(cube)
    assert_ne!(cube, cloned_cube);

    let KclValue::Solid { value: cube } = cube else {
        panic!("Expected a solid, got: {:?}", cube);
        panic!("Expected a solid, got: {cube:?}");
    };
    let KclValue::Solid { value: cloned_cube } = cloned_cube else {
        panic!("Expected a solid, got: {:?}", cloned_cube);
        panic!("Expected a solid, got: {cloned_cube:?}");
    };

    assert_ne!(cube.id, cloned_cube.id);
@ -555,10 +554,10 @@ clonedCube = clone(cube)
    assert_ne!(cube, cloned_cube);

    let KclValue::Solid { value: cube } = cube else {
        panic!("Expected a solid, got: {:?}", cube);
        panic!("Expected a solid, got: {cube:?}");
    };
    let KclValue::Solid { value: cloned_cube } = cloned_cube else {
        panic!("Expected a solid, got: {:?}", cloned_cube);
        panic!("Expected a solid, got: {cloned_cube:?}");
    };

    assert_ne!(cube.id, cloned_cube.id);
@ -655,10 +654,10 @@ clonedCube = clone(cube)
    assert_ne!(cube, cloned_cube);

    let KclValue::Solid { value: cube } = cube else {
        panic!("Expected a solid, got: {:?}", cube);
        panic!("Expected a solid, got: {cube:?}");
    };
    let KclValue::Solid { value: cloned_cube } = cloned_cube else {
        panic!("Expected a solid, got: {:?}", cloned_cube);
        panic!("Expected a solid, got: {cloned_cube:?}");
    };

    assert_ne!(cube.id, cloned_cube.id);
@ -1,7 +1,7 @@
//! Constructive Solid Geometry (CSG) operations.

use anyhow::Result;
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit};
use kittycad_modeling_cmds::{
    self as kcmc,
    ok_response::OkModelingCmdResponse,
@ -9,11 +9,11 @@ use kittycad_modeling_cmds::{
    websocket::OkWebSocketResponseData,
};

use super::{args::TyF64, DEFAULT_TOLERANCE_MM};
use super::{DEFAULT_TOLERANCE_MM, args::TyF64};
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{types::RuntimeType, ExecState, KclValue, ModelingCmdMeta, Solid},
    std::{patterns::GeometryTrait, Args},
    execution::{ExecState, KclValue, ModelingCmdMeta, Solid, types::RuntimeType},
    std::{Args, patterns::GeometryTrait},
};

/// Union two or more solids into a single solid.
@ -1,18 +1,18 @@
//! Edge helper functions.

use anyhow::Result;
use kcmc::{each_cmd as mcmd, ok_response::OkModelingCmdResponse, websocket::OkWebSocketResponseData, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd, ok_response::OkModelingCmdResponse, websocket::OkWebSocketResponseData};
use kittycad_modeling_cmds as kcmc;
use uuid::Uuid;

use crate::{
    SourceRange,
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{ArrayLen, RuntimeType},
        ExecState, ExtrudeSurface, KclValue, ModelingCmdMeta, TagIdentifier,
        types::{ArrayLen, RuntimeType},
    },
    std::{sketch::FaceTag, Args},
    SourceRange,
    std::{Args, sketch::FaceTag},
};

/// Get the opposite edge to the edge given.
@ -55,7 +55,7 @@ async fn inner_get_opposite_edge(
    } = &resp
    else {
        return Err(KclError::new_engine(KclErrorDetails::new(
            format!("mcmd::Solid3dGetOppositeEdge response was not as expected: {:?}", resp),
            format!("mcmd::Solid3dGetOppositeEdge response was not as expected: {resp:?}"),
            vec![args.source_range],
        )));
    };
@ -104,10 +104,7 @@ async fn inner_get_next_adjacent_edge(
    } = &resp
    else {
        return Err(KclError::new_engine(KclErrorDetails::new(
            format!(
                "mcmd::Solid3dGetNextAdjacentEdge response was not as expected: {:?}",
                resp
            ),
            format!("mcmd::Solid3dGetNextAdjacentEdge response was not as expected: {resp:?}"),
            vec![args.source_range],
        )));
    };
@ -160,10 +157,7 @@ async fn inner_get_previous_adjacent_edge(
    } = &resp
    else {
        return Err(KclError::new_engine(KclErrorDetails::new(
            format!(
                "mcmd::Solid3dGetPrevAdjacentEdge response was not as expected: {:?}",
                resp
            ),
            format!("mcmd::Solid3dGetPrevAdjacentEdge response was not as expected: {resp:?}"),
            vec![args.source_range],
        )));
    };
@ -259,7 +253,7 @@ async fn inner_get_common_edge(
    } = &resp
    else {
        return Err(KclError::new_engine(KclErrorDetails::new(
            format!("mcmd::Solid3dGetCommonEdge response was not as expected: {:?}", resp),
            format!("mcmd::Solid3dGetCommonEdge response was not as expected: {resp:?}"),
            vec![args.source_range],
        )));
    };
@ -4,13 +4,12 @@ use std::collections::HashMap;

use anyhow::Result;
use kcmc::{
    each_cmd as mcmd,
    ModelingCmd, each_cmd as mcmd,
    length_unit::LengthUnit,
    ok_response::OkModelingCmdResponse,
    output::ExtrusionFaceInfo,
    shared::{ExtrusionFaceCapType, Opposite},
    websocket::{ModelingCmdReq, OkWebSocketResponseData},
    ModelingCmd,
};
use kittycad_modeling_cmds::{
    self as kcmc,
@ -18,12 +17,12 @@ use kittycad_modeling_cmds::{
};
use uuid::Uuid;

use super::{args::TyF64, utils::point_to_mm, DEFAULT_TOLERANCE_MM};
use super::{DEFAULT_TOLERANCE_MM, args::TyF64, utils::point_to_mm};
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::RuntimeType, ArtifactId, ExecState, ExtrudeSurface, GeoMeta, KclValue, ModelingCmdMeta, Path, Sketch,
        SketchSurface, Solid,
        ArtifactId, ExecState, ExtrudeSurface, GeoMeta, KclValue, ModelingCmdMeta, Path, Sketch, SketchSurface, Solid,
        types::RuntimeType,
    },
    parsing::ast::types::TagNode,
    std::Args,
@ -2,20 +2,20 @@

use anyhow::Result;
use indexmap::IndexMap;
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, shared::CutType, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit, shared::CutType};
use kittycad_modeling_cmds as kcmc;
use serde::{Deserialize, Serialize};

use super::{args::TyF64, DEFAULT_TOLERANCE_MM};
use super::{DEFAULT_TOLERANCE_MM, args::TyF64};
use crate::{
    SourceRange,
    errors::{KclError, KclErrorDetails},
    execution::{
        types::RuntimeType, EdgeCut, ExecState, ExtrudeSurface, FilletSurface, GeoMeta, KclValue, ModelingCmdMeta,
        Solid, TagIdentifier,
        EdgeCut, ExecState, ExtrudeSurface, FilletSurface, GeoMeta, KclValue, ModelingCmdMeta, Solid, TagIdentifier,
        types::RuntimeType,
    },
    parsing::ast::types::TagNode,
    std::Args,
    SourceRange,
};

/// A tag or a uuid of an edge.
@ -1,17 +1,17 @@
//! Standard library helices.

use anyhow::Result;
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, shared::Angle, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit, shared::Angle};
use kittycad_modeling_cmds::{self as kcmc, shared::Point3d};

use super::args::TyF64;
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{PrimitiveType, RuntimeType},
        ExecState, Helix as HelixValue, KclValue, ModelingCmdMeta, Solid,
        types::{PrimitiveType, RuntimeType},
    },
    std::{axis_or_reference::Axis3dOrEdgeReference, Args},
    std::{Args, axis_or_reference::Axis3dOrEdgeReference},
};

/// Create a helix.
@ -3,18 +3,18 @@
use std::num::NonZeroU32;

use anyhow::Result;
use kcmc::{each_cmd as mcmd, length_unit::LengthUnit, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd, length_unit::LengthUnit};
use kittycad_modeling_cmds as kcmc;

use super::{args::TyF64, DEFAULT_TOLERANCE_MM};
use super::{DEFAULT_TOLERANCE_MM, args::TyF64};
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{NumericType, RuntimeType},
        ExecState, KclValue, ModelingCmdMeta, Sketch, Solid,
        types::{NumericType, RuntimeType},
    },
    parsing::ast::types::TagNode,
    std::{extrude::do_post_extrude, Args},
    std::{Args, extrude::do_post_extrude},
};

const DEFAULT_V_DEGREE: u32 = 2;
@ -3,13 +3,13 @@
use anyhow::Result;

use crate::{
    CompilationError,
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{ArrayLen, NumericType, RuntimeType},
        ExecState, KclValue,
        types::{ArrayLen, NumericType, RuntimeType},
    },
    std::args::{Args, TyF64},
    CompilationError,
};

/// Compute the remainder after dividing `num` by `div`.
@ -1,7 +1,7 @@
//! Standard library mirror.

use anyhow::Result;
use kcmc::{each_cmd as mcmd, ModelingCmd};
use kcmc::{ModelingCmd, each_cmd as mcmd};
use kittycad_modeling_cmds::{
    self as kcmc, length_unit::LengthUnit, ok_response::OkModelingCmdResponse, shared::Point3d,
    websocket::OkWebSocketResponseData,
@ -10,10 +10,10 @@ use kittycad_modeling_cmds::{
use crate::{
    errors::{KclError, KclErrorDetails},
    execution::{
        types::{PrimitiveType, RuntimeType},
        ExecState, KclValue, Sketch,
        types::{PrimitiveType, RuntimeType},
    },
    std::{axis_or_reference::Axis2dOrEdgeReference, Args},
    std::{Args, axis_or_reference::Axis2dOrEdgeReference},
};

/// Mirror a sketch.
@ -84,14 +84,14 @@ async fn inner_mirror_2d(
                    return Err(KclError::new_engine(KclErrorDetails::new(
                        "No edges found in mirror info".to_string(),
                        vec![args.source_range],
                    )))
                    )));
                }
            }
            Ok(())
        })?;
    } else {
        return Err(KclError::new_engine(KclErrorDetails::new(
            format!("EntityMirror response was not as expected: {:?}", resp),
            format!("EntityMirror response was not as expected: {resp:?}"),
            vec![args.source_range],
        )));
    };
@ -127,14 +127,14 @@ async fn inner_mirror_2d(
                    return Err(KclError::new_engine(KclErrorDetails::new(
                        "No edges found in mirror info".to_string(),
                        vec![args.source_range],
                    )))
                    )));
                }
            }
            Ok(())
        })?;
    } else {
        return Err(KclError::new_engine(KclErrorDetails::new(
            format!("EntityMirrorAcrossEdge response was not as expected: {:?}", resp),
            format!("EntityMirrorAcrossEdge response was not as expected: {resp:?}"),
            vec![args.source_range],
        )));
    };
Some files were not shown because too many files have changed in this diff