Compare commits

...

14 Commits

Author SHA1 Message Date
3da6fc3b7e Bump to v0.4.0 (#413) 2023-09-07 20:04:04 -04:00
34dd15ead7 Add macOS universal release builds (#408)
* Add macOS universal release builds
Fixes #397

* Change macos to universal-apple-darwin

* Upload universal-apple-darwin/release

* Clean up

* Clean up
2023-09-07 19:19:58 -04:00
b3d441e9d6 start of fuzzing (#405)
* fuzzing

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* more tests

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* unicode

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* more fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix clippy

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* one more

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* one more

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* last one

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
2023-09-06 21:56:10 -07:00
4b3dc3756c remove noisy log (#407) 2023-09-07 04:11:46 +00:00
10027b98b5 implement rename (#396)
* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* rename function

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* start of rename

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* cache rust

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix gnarly bug

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fucking tabs

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
Co-authored-by: Kurt Hutten <k.hutten@protonmail.ch>
2023-09-06 19:34:47 -07:00
da17dad63b remove rust tests in ci, already covered in build (#403) 2023-09-07 01:49:11 +00:00
fba6c422a8 Fix LSP tooltip cutoff, style hover/autocomplete tooltips, add text wrapping setting (#404)
* Fix: allow tooltips to overflow code pane
while keeping the same vertical and horizontal
scroll behavior that we've had.

Signed-off-by: Frank Noirot <frank@kittycad.io>

* Style tooltips in light and dark mode

* Fix: properly display autocomplete info as HTML
We were parsing it from md to html, but displaying
the parsed html as a string in the info box.

Signed-off-by: Frank Noirot <frank@kittycad.io>

* Fix z-index of command bar to show over code panel

* Let user set text wrapping in editor

* Style hover tooltips

* Fix failing tests
by not including line wrapping plugin in test mode

---------

Signed-off-by: Frank Noirot <frank@kittycad.io>
2023-09-06 21:27:30 -04:00
0b4b93932d tweak prettierignore (#401) 2023-09-07 00:31:36 +00:00
f42900ec46 break up ci (#400) 2023-09-07 10:15:38 +10:00
eeca624ba6 bump kitty lib (#398) 2023-09-06 22:45:43 +00:00
84d08bad16 Allow people to set format options (#389)
* better naming

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* bump version

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fix tests

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* whitespace

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
2023-09-06 10:36:03 -07:00
1181f33e9d Bump to v0.3.2 (#392) 2023-09-06 09:04:06 -04:00
797e200d08 Make sure extra artifacts don't get uploaded on release (#390)
* Make sure extra artifacts don't get uploaded on release
Fixes #388

* Clean up
2023-09-06 07:52:58 -04:00
d2f231066b Franknoirot/debug rerendering (#387)
* Refactor: let Stream handle control drag status

* Fix: prevent app rerender on mouse move
By not setting the highlight range unless things
actually need to change. Setting the highlight range
still causes an app rerender, though.

Signed-off-by: Frank Noirot <frank@kittycad.io>

---------

Signed-off-by: Frank Noirot <frank@kittycad.io>
2023-09-06 15:32:53 +10:00
32 changed files with 3371 additions and 317 deletions


@ -1,4 +1,4 @@
name: CI
name: CI
on:
pull_request:
@ -13,17 +13,31 @@ jobs:
check-format:
runs-on: 'ubuntu-20.04'
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- run: yarn fmt-check
check-types:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- uses: Swatinem/rust-cache@v2
with:
workspaces: "./src/wasm-lib"
- run: yarn build:wasm
- run: yarn tsc
build-test-web:
runs-on: ubuntu-20.04
@ -36,12 +50,15 @@ jobs:
- uses: actions/setup-node@v3
with:
node-version-file: '.nvmrc'
cache: 'yarn'
- run: yarn install
- run: yarn build:wasm
- uses: Swatinem/rust-cache@v2
with:
workspaces: "./src/wasm-lib"
- run: yarn tsc
- run: yarn build:wasm
- run: yarn simpleserver:ci
@ -49,14 +66,12 @@ jobs:
- run: yarn test:cov
- run: yarn test:rust
- id: export_version
run: echo "version=`cat package.json | jq -r '.version'`" >> "$GITHUB_OUTPUT"
build-apps:
needs: [check-format, build-test-web]
needs: [check-format, build-test-web, check-types]
runs-on: ${{ matrix.os }}
strategy:
matrix:
@ -87,6 +102,10 @@ jobs:
with:
workspaces: './src-tauri -> target'
- uses: Swatinem/rust-cache@v2
with:
workspaces: "./src/wasm-lib"
- name: wasm prep
shell: bash
run: |
@ -110,15 +129,22 @@ jobs:
- name: Fix format
run: yarn fmt
- name: install apple silicon target mac
if: matrix.os == 'macos-latest'
run: |
rustup target add aarch64-apple-darwin
- name: Build the app for the current platform (no upload)
uses: tauri-apps/tauri-action@v0
env:
TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
with:
args: ${{ matrix.os == 'macos-latest' && '--target universal-apple-darwin' || '' }}
- uses: actions/upload-artifact@v3
with:
path: src-tauri/target/release/bundle/*/*
path: ${{ matrix.os == 'macos-latest' && 'src-tauri/target/universal-apple-darwin/release/bundle/*/*' || 'src-tauri/target/release/bundle/*/*' }}
publish-apps-release:
@ -133,8 +159,7 @@ jobs:
- name: Generate the update static endpoint
run: |
ls -l artifact
ls -l artifact/*
ls -l artifact/*/*itty*
DARWIN_SIG=`cat artifact/macos/*.app.tar.gz.sig`
LINUX_SIG=`cat artifact/appimage/*.AppImage.tar.gz.sig`
WINDOWS_SIG=`cat artifact/nsis/*.nsis.zip.sig`
@ -154,6 +179,10 @@ jobs:
"signature": $darwin_sig,
"url": $darwin_url
},
"darwin-aarch64": {
"signature": $darwin_sig,
"url": $darwin_url
},
"linux-x86_64": {
"signature": $linux_sig,
"url": $linux_url
@ -175,15 +204,15 @@ jobs:
uses: google-github-actions/setup-gcloud@v1.1.1
with:
project_id: kittycadapi
- name: Upload release files to public bucket
uses: google-github-actions/upload-cloud-storage@v1.0.3
with:
path: artifact
glob: '*/*'
glob: '*/*itty*'
parent: false
destination: dl.kittycad.io/releases/modeling-app/v${{ env.VERSION_NO_V }}
destination: dl.kittycad.io/releases/modeling-app/v${{ env.VERSION_NO_V }}
- name: Upload update endpoint to public bucket
uses: google-github-actions/upload-cloud-storage@v1.0.3
with:
@ -193,4 +222,4 @@ jobs:
- name: Upload release files to Github
uses: softprops/action-gh-release@v1
with:
files: artifact/*/*
files: artifact/*/*itty*


@ -5,3 +5,5 @@ coverage
# Ignore Rust projects:
*.rs
target
src/wasm-lib/pkg
src/wasm-lib/kcl/bindings


@ -86,3 +86,24 @@ The PR may serve as a place to discuss the human-readable changelog and extra QA
3. Create a new release and tag pointing to the bump version commit using semantic versioning `v{x}.{y}.{z}`
4. A new Action kicks in at https://github.com/KittyCAD/modeling-app/actions, uploading artifacts to the release
## Fuzzing the parser
Make sure you install cargo fuzz:
```bash
$ cargo install cargo-fuzz
```
```bash
$ cd src/wasm-lib/kcl
# list the fuzz targets
$ cargo fuzz list
# run the parser fuzzer
$ cargo +nightly fuzz run parser
```
For more information on fuzzing you can check out
[this guide](https://rust-fuzz.github.io/book/cargo-fuzz.html).


@ -1,6 +1,6 @@
{
"name": "untitled-app",
"version": "0.3.1",
"version": "0.4.0",
"private": true,
"dependencies": {
"@codemirror/autocomplete": "^6.9.0",
@ -10,7 +10,7 @@
"@fortawesome/react-fontawesome": "^0.2.0",
"@headlessui/react": "^1.7.13",
"@headlessui/tailwindcss": "^0.2.0",
"@kittycad/lib": "^0.0.35",
"@kittycad/lib": "^0.0.36",
"@lezer/javascript": "^1.4.7",
"@open-rpc/client-js": "^1.8.1",
"@react-hook/resize-observer": "^1.2.6",


@ -8,7 +8,7 @@
},
"package": {
"productName": "kittycad-modeling",
"version": "0.3.1"
"version": "0.4.0"
},
"tauri": {
"allowlist": {


@ -10,7 +10,7 @@ import { DebugPanel } from './components/DebugPanel'
import { v4 as uuidv4 } from 'uuid'
import { asyncParser } from './lang/abstractSyntaxTree'
import { _executor } from './lang/executor'
import CodeMirror from '@uiw/react-codemirror'
import CodeMirror, { Extension } from '@uiw/react-codemirror'
import { linter, lintGutter } from '@codemirror/lint'
import { ViewUpdate, EditorView } from '@codemirror/view'
import {
@ -54,6 +54,7 @@ import { useGlobalStateContext } from 'hooks/useGlobalStateContext'
import { onboardingPaths } from 'routes/Onboarding'
import { LanguageServerClient } from 'editor/lsp'
import kclLanguage from 'editor/lsp/language'
import { CSSRuleObject } from 'tailwindcss/types/config'
export function App() {
const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
@ -78,6 +79,7 @@ export function App() {
setArtifactMap,
engineCommandManager,
setEngineCommandManager,
highlightRange,
setHighlightRange,
setCursor2,
sourceRangeMap,
@ -91,7 +93,6 @@ export function App() {
openPanes,
setOpenPanes,
didDragInStream,
setDidDragInStream,
setStreamDimensions,
streamDimensions,
} = useStore((s) => ({
@ -112,6 +113,7 @@ export function App() {
setArtifactMap: s.setArtifactNSourceRangeMaps,
engineCommandManager: s.engineCommandManager,
setEngineCommandManager: s.setEngineCommandManager,
highlightRange: s.highlightRange,
setHighlightRange: s.setHighlightRange,
isShiftDown: s.isShiftDown,
setCursor: s.setCursor,
@ -128,7 +130,6 @@ export function App() {
openPanes: s.openPanes,
setOpenPanes: s.setOpenPanes,
didDragInStream: s.didDragInStream,
setDidDragInStream: s.setDidDragInStream,
setStreamDimensions: s.setStreamDimensions,
streamDimensions: s.streamDimensions,
}))
@ -138,7 +139,7 @@ export function App() {
context: { token },
},
settings: {
context: { showDebugPanel, theme, onboardingStatus },
context: { showDebugPanel, theme, onboardingStatus, textWrapping },
},
} = useGlobalStateContext()
@ -332,11 +333,14 @@ export function App() {
const unSubHover = engineCommandManager.subscribeToUnreliable({
event: 'highlight_set_entity',
callback: ({ data }) => {
if (!data?.entity_id) {
setHighlightRange([0, 0])
} else {
if (data?.entity_id) {
const sourceRange = sourceRangeMap[data.entity_id]
setHighlightRange(sourceRange)
} else if (
!highlightRange ||
(highlightRange[0] !== 0 && highlightRange[1] !== 0)
) {
setHighlightRange([0, 0])
}
},
})
@ -385,9 +389,6 @@ export function App() {
nativeEvent,
}) => {
nativeEvent.preventDefault()
if (isMouseDownInStream) {
setDidDragInStream(true)
}
const { x, y } = getNormalisedCoordinates({
clientX,
@ -422,17 +423,6 @@ export function App() {
}
}
const extraExtensions = useMemo(() => {
if (TEST) return []
return [
lintGutter(),
linter((_view) => {
return kclErrToDiagnostic(useStore.getState().kclErrors)
}),
EditorView.lineWrapping,
]
}, [])
// So this is a bit weird, we need to initialize the lsp server and client.
// But the server happens async so we break this into two parts.
// Below is the client and server promise.
@ -472,6 +462,25 @@ export function App() {
return plugin
}, [lspClient, isLSPServerReady])
const editorExtensions = useMemo(() => {
const extensions = [lineHighlightField] as Extension[]
if (kclLSP) extensions.push(kclLSP)
// These extensions have proven to mess with vitest
if (!TEST) {
extensions.push(
lintGutter(),
linter((_view) => {
return kclErrToDiagnostic(useStore.getState().kclErrors)
})
)
if (textWrapping === 'On') extensions.push(EditorView.lineWrapping)
}
return extensions
}, [kclLSP, textWrapping])
return (
<div
className="h-screen overflow-hidden relative flex flex-col cursor-pointer select-none"
@ -513,7 +522,7 @@ export function App() {
<CollapsiblePanel
title="Code"
icon={faCode}
className="open:!mb-2 overflow-x-hidden"
className="open:!mb-2"
open={openPanes.includes('code')}
>
<div className="px-2 py-1">
@ -527,16 +536,13 @@ export function App() {
</div>
<div
id="code-mirror-override"
className="overflow-x-hidden h-full"
className="full-height-subtract"
style={{ '--height-subtract': '4.25rem' } as CSSRuleObject}
>
<CodeMirror
className="h-full overflow-hidden-x"
className="h-full"
value={code}
extensions={
kclLSP
? [kclLSP, lineHighlightField, ...extraExtensions]
: [lineHighlightField, ...extraExtensions]
}
extensions={editorExtensions}
onChange={onChange}
onUpdate={onUpdate}
theme={editorTheme}


@ -1,5 +1,5 @@
.panel {
@apply relative overflow-auto z-0;
@apply relative z-0;
@apply bg-chalkboard-10/70 backdrop-blur-sm;
}


@ -196,7 +196,7 @@ const CommandBar = () => {
setCommandBarOpen(false)
clearState()
}}
className="fixed inset-0 overflow-y-auto p-4 pt-[25vh]"
className="fixed inset-0 z-40 overflow-y-auto p-4 pt-[25vh]"
>
<Transition.Child
enter="duration-100 ease-out"
@ -207,7 +207,7 @@ const CommandBar = () => {
leaveTo="opacity-0"
as={Fragment}
>
<Dialog.Overlay className="fixed z-40 inset-0 bg-chalkboard-10/70 dark:bg-chalkboard-110/50" />
<Dialog.Overlay className="fixed inset-0 bg-chalkboard-10/70 dark:bg-chalkboard-110/50" />
</Transition.Child>
<Transition.Child
enter="duration-100 ease-out"
@ -221,7 +221,7 @@ const CommandBar = () => {
<Combobox
value={selectedCommand}
onChange={handleCommandSelection}
className="rounded relative mx-auto z-40 p-2 bg-chalkboard-10 dark:bg-chalkboard-100 border dark:border-chalkboard-70 max-w-xl w-full shadow-lg"
className="rounded relative mx-auto p-2 bg-chalkboard-10 dark:bg-chalkboard-100 border dark:border-chalkboard-70 max-w-xl w-full shadow-lg"
as="div"
>
<div className="flex gap-2 items-center">


@ -12,6 +12,7 @@ import Loading from './Loading'
export const Stream = ({ className = '' }) => {
const [isLoading, setIsLoading] = useState(true)
const [clickCoords, setClickCoords] = useState<{ x: number; y: number }>()
const videoRef = useRef<HTMLVideoElement>(null)
const {
mediaStream,
@ -71,6 +72,7 @@ export const Stream = ({ className = '' }) => {
})
setIsMouseDownInStream(true)
setClickCoords({ x, y })
}
const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
@ -124,6 +126,19 @@ export const Stream = ({ className = '' }) => {
})
}
setDidDragInStream(false)
setClickCoords(undefined)
}
const handleMouseMove: MouseEventHandler<HTMLVideoElement> = (e) => {
if (!clickCoords) return
const delta =
((clickCoords.x - e.clientX) ** 2 + (clickCoords.y - e.clientY) ** 2) **
0.5
if (delta > 5 && !didDragInStream) {
setDidDragInStream(true)
}
}
return (
@ -139,6 +154,7 @@ export const Stream = ({ className = '' }) => {
onContextMenuCapture={(e) => e.preventDefault()}
onWheel={handleScroll}
onPlay={() => setIsLoading(false)}
onMouseMoveCapture={handleMouseMove}
className="w-full h-full"
/>
{isLoading && (


@ -208,7 +208,13 @@ export class LanguageServerPlugin implements PluginValue {
filterText: filterText ?? label,
}
if (documentation) {
completion.info = formatContents(documentation)
completion.info = () => {
const htmlString = formatContents(documentation)
const htmlNode = document.createElement('div')
htmlNode.style.display = 'contents'
htmlNode.innerHTML = htmlString
return { dom: htmlNode }
}
}
return completion


@ -82,11 +82,22 @@ code {
monospace;
}
.full-height-subtract {
--height-subtract: 2.25rem;
height: 100%;
max-height: calc(100% - var(--height-subtract));
}
#code-mirror-override .cm-editor {
@apply bg-transparent;
@apply h-full bg-transparent;
}
#code-mirror-override .cm-scroller {
@apply h-full;
}
#code-mirror-override .cm-scroller::-webkit-scrollbar {
@apply h-0;
}
#code-mirror-override .cm-activeLine,
@ -137,14 +148,39 @@ code {
}
#code-mirror-override .cm-tooltip {
font-size: 80%;
@apply text-xs shadow-md;
@apply bg-chalkboard-10 text-chalkboard-80;
@apply rounded-sm border-solid border border-chalkboard-40/30 border-l-liquid-10;
}
.dark #code-mirror-override .cm-tooltip {
@apply bg-chalkboard-110 text-chalkboard-40;
@apply border-chalkboard-70/20 border-l-liquid-70;
}
#code-mirror-override .cm-tooltip-hover {
@apply py-1 px-2 w-max max-w-md;
}
#code-mirror-override .cm-tooltip-hover .documentation {
padding: 5;
#code-mirror-override .cm-completionInfo {
@apply px-4 rounded-l-none;
@apply bg-chalkboard-10 text-liquid-90;
@apply border-liquid-40/30;
}
.dark #code-mirror-override .cm-completionInfo {
@apply bg-liquid-120 text-liquid-50;
@apply border-liquid-90/60;
}
#code-mirror-override .cm-tooltip-autocomplete li {
@apply px-2 py-1;
}
#code-mirror-override .cm-tooltip-autocomplete li[aria-selected='true'] {
@apply bg-liquid-10 text-liquid-110;
}
.dark #code-mirror-override .cm-tooltip-autocomplete li[aria-selected='true'] {
@apply bg-liquid-100 text-liquid-20;
}
#code-mirror-override .cm-content {


@ -1564,7 +1564,7 @@ const key = 'c'`
start: code.indexOf('\n// this is a comment'),
end: code.indexOf('const key'),
value: {
type: 'block',
type: 'blockComment',
value: 'this is a comment',
},
}
@ -1602,7 +1602,7 @@ const key = 'c'`
start: 106,
end: 166,
value: {
type: 'block',
type: 'blockComment',
value: 'this is\n a comment\n spanning a few lines',
},
})
@ -1625,7 +1625,7 @@ const key = 'c'`
start: 125,
end: 141,
value: {
type: 'block',
type: 'blockComment',
value: 'a comment',
},
})


@ -318,9 +318,9 @@ describe('it recasts wrapped object expressions in pipe bodies with correct inde
|> line({ to: [0.62, 4.15], tag: 'seg01' }, %)
|> line([2.77, -1.24], %)
|> angledLineThatIntersects({
angle: 201,
offset: -1.35,
intersectTag: 'seg01'
angle: 201,
offset: -1.35,
intersectTag: 'seg01'
}, %)
|> line([-0.42, -1.72], %)
show(part001)`


@ -59,19 +59,19 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
` |> lineTo({ to: [1, 1], tag: 'abc1' }, %)`,
` |> line({ to: [-2.04, -0.7], tag: 'abc2' }, %)`,
` |> angledLine({`,
` angle: 157,`,
` length: 1.69,`,
` tag: 'abc3'`,
` angle: 157,`,
` length: 1.69,`,
` tag: 'abc3'`,
` }, %)`,
` |> angledLineOfXLength({`,
` angle: 217,`,
` length: 0.86,`,
` tag: 'abc4'`,
` angle: 217,`,
` length: 0.86,`,
` tag: 'abc4'`,
` }, %)`,
` |> angledLineOfYLength({`,
` angle: 104,`,
` length: 1.58,`,
` tag: 'abc5'`,
` angle: 104,`,
` length: 1.58,`,
` tag: 'abc5'`,
` }, %)`,
` |> angledLineToX({ angle: 55, to: -2.89, tag: 'abc6' }, %)`,
` |> angledLineToY({ angle: 330, to: 2.53, tag: 'abc7' }, %)`,
@ -144,9 +144,9 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
inputCode: bigExample,
callToSwap: [
`angledLine({`,
` angle: 157,`,
` length: 1.69,`,
` tag: 'abc3'`,
` angle: 157,`,
` length: 1.69,`,
` tag: 'abc3'`,
` }, %)`,
].join('\n'),
constraintType: 'horizontal',
@ -172,9 +172,9 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
inputCode: bigExample,
callToSwap: [
`angledLineOfXLength({`,
` angle: 217,`,
` length: 0.86,`,
` tag: 'abc4'`,
` angle: 217,`,
` length: 0.86,`,
` tag: 'abc4'`,
` }, %)`,
].join('\n'),
constraintType: 'horizontal',
@ -201,9 +201,9 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
inputCode: bigExample,
callToSwap: [
`angledLineOfYLength({`,
` angle: 104,`,
` length: 1.58,`,
` tag: 'abc5'`,
` angle: 104,`,
` length: 1.58,`,
` tag: 'abc5'`,
` }, %)`,
].join('\n'),
constraintType: 'vertical',


@ -133,64 +133,64 @@ const myAng2 = 134
const part001 = startSketchAt([0, 0])
|> line({ to: [1, 3.82], tag: 'seg01' }, %) // ln-should-get-tag
|> angledLineToX([
-angleToMatchLengthX('seg01', myVar, %),
myVar
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
-angleToMatchLengthX('seg01', myVar, %),
myVar
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|> angledLineToY([
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|> angledLine([45, segLen('seg01', %)], %) // ln-lineTo-free should become angledLine
|> angledLine([45, segLen('seg01', %)], %) // ln-angledLineToX-free should become angledLine
|> angledLine([myAng, segLen('seg01', %)], %) // ln-angledLineToX-angle should become angledLine
|> angledLineToX([
angleToMatchLengthX('seg01', myVar2, %),
myVar2
], %) // ln-angledLineToX-xAbsolute should use angleToMatchLengthX to get angle
angleToMatchLengthX('seg01', myVar2, %),
myVar2
], %) // ln-angledLineToX-xAbsolute should use angleToMatchLengthX to get angle
|> angledLine([-45, segLen('seg01', %)], %) // ln-angledLineToY-free should become angledLine
|> angledLine([myAng2, segLen('seg01', %)], %) // ln-angledLineToY-angle should become angledLine
|> angledLineToY([
angleToMatchLengthY('seg01', myVar3, %),
myVar3
], %) // ln-angledLineToY-yAbsolute should use angleToMatchLengthY to get angle
angleToMatchLengthY('seg01', myVar3, %),
myVar3
], %) // ln-angledLineToY-yAbsolute should use angleToMatchLengthY to get angle
|> line([
min(segLen('seg01', %), myVar),
legLen(segLen('seg01', %), myVar)
], %) // ln-should use legLen for y
min(segLen('seg01', %), myVar),
legLen(segLen('seg01', %), myVar)
], %) // ln-should use legLen for y
|> line([
min(segLen('seg01', %), myVar),
-legLen(segLen('seg01', %), myVar)
], %) // ln-legLen but negative
min(segLen('seg01', %), myVar),
-legLen(segLen('seg01', %), myVar)
], %) // ln-legLen but negative
|> angledLine([-112, segLen('seg01', %)], %) // ln-should become angledLine
|> angledLine([myVar, segLen('seg01', %)], %) // ln-use segLen for secound arg
|> angledLine([45, segLen('seg01', %)], %) // ln-segLen again
|> angledLine([54, segLen('seg01', %)], %) // ln-should be transformed to angledLine
|> angledLineOfXLength([
legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-should use legAngX to calculate angle
legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-should use legAngX to calculate angle
|> angledLineOfXLength([
180 + legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-same as above but should have + 180 to match original quadrant
180 + legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-same as above but should have + 180 to match original quadrant
|> line([
legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-legLen again but yRelative
legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-legLen again but yRelative
|> line([
-legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-negative legLen yRelative
-legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-negative legLen yRelative
|> angledLine([58, segLen('seg01', %)], %) // ln-angledLineOfYLength-free should become angledLine
|> angledLine([myAng, segLen('seg01', %)], %) // ln-angledLineOfYLength-angle should become angledLine
|> angledLineOfXLength([
legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-angledLineOfYLength-yRelative use legAngY
legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-angledLineOfYLength-yRelative use legAngY
|> angledLineOfXLength([
270 + legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-angledLineOfYLength-yRelative with angle > 90 use binExp
270 + legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-angledLineOfYLength-yRelative with angle > 90 use binExp
|> xLine(segLen('seg01', %), %) // ln-xLine-free should sub in segLen
|> yLine(segLen('seg01', %), %) // ln-yLine-free should sub in segLen
|> xLine(segLen('seg01', %), %) // ln-xLineTo-free should convert to xLine
@ -406,9 +406,9 @@ show(part001)`
'setVertDistance'
)
expect(expectedCode).toContain(`|> lineTo([
lastSegX(%) + myVar,
segEndY('seg01', %) + 2.93
], %) // xRelative`)
lastSegX(%) + myVar,
segEndY('seg01', %) + 2.93
], %) // xRelative`)
})
it('testing for yRelative to horizontal distance', async () => {
const expectedCode = await helperThing(
@ -417,9 +417,9 @@ show(part001)`
'setHorzDistance'
)
expect(expectedCode).toContain(`|> lineTo([
segEndX('seg01', %) + 2.6,
lastSegY(%) + myVar
], %) // yRelative`)
segEndX('seg01', %) + 2.6,
lastSegY(%) + myVar
], %) // yRelative`)
})
})
})


@ -1426,7 +1426,6 @@ export function transformAstSketchLines({
selectionRanges.codeBasedSelections.forEach(({ range }, index) => {
const callBack = transformInfos?.[index].createNode
const transformTo = transformInfos?.[index].tooltip
console.log('transformTo', transformInfos)
if (!callBack || !transformTo) throw new Error('no callback helper')
const getNode = getNodeFromPathCurry(


@ -62,6 +62,17 @@ export const settingsCommandBarMeta: CommandBarMeta = {
},
],
},
'Set Text Wrapping': {
displayValue: (args: string[]) => 'Set whether text in the editor wraps',
args: [
{
name: 'textWrapping',
type: 'select',
defaultValue: 'textWrapping',
options: [{ name: 'On' }, { name: 'Off' }],
},
],
},
'Set Onboarding Status': {
hide: 'both',
},
@ -78,6 +89,7 @@ export const settingsMachine = createMachine(
unitSystem: UnitSystem.Imperial,
baseUnit: 'in' as BaseUnit,
defaultDirectory: '',
textWrapping: 'On' as 'On' | 'Off',
showDebugPanel: false,
onboardingStatus: '',
},
@ -142,6 +154,17 @@ export const settingsMachine = createMachine(
target: 'idle',
internal: true,
},
'Set Text Wrapping': {
actions: [
assign({
textWrapping: (_, event) => event.data.textWrapping,
}),
'persistSettings',
'toastSuccess',
],
target: 'idle',
internal: true,
},
'Toggle Debug Panel': {
actions: [
assign({
@ -182,6 +205,7 @@ export const settingsMachine = createMachine(
data: { unitSystem: UnitSystem }
}
| { type: 'Set Base Unit'; data: { baseUnit: BaseUnit } }
| { type: 'Set Text Wrapping'; data: { textWrapping: 'On' | 'Off' } }
| { type: 'Set Onboarding Status'; data: { onboardingStatus: string } }
| { type: 'Toggle Debug Panel' },
},


@ -18,6 +18,7 @@ export interface Typegen0 {
| 'Set Default Directory'
| 'Set Default Project Name'
| 'Set Onboarding Status'
| 'Set Text Wrapping'
| 'Set Theme'
| 'Set Unit System'
| 'Toggle Debug Panel'
@ -26,6 +27,7 @@ export interface Typegen0 {
| 'Set Default Directory'
| 'Set Default Project Name'
| 'Set Onboarding Status'
| 'Set Text Wrapping'
| 'Set Theme'
| 'Set Unit System'
| 'Toggle Debug Panel'
@ -34,6 +36,7 @@ export interface Typegen0 {
| 'Set Base Unit'
| 'Set Default Directory'
| 'Set Default Project Name'
| 'Set Text Wrapping'
| 'Set Theme'
| 'Set Unit System'
| 'Toggle Debug Panel'


@ -1094,7 +1094,7 @@ dependencies = [
[[package]]
name = "kcl-lib"
version = "0.1.20"
version = "0.1.24"
dependencies = [
"anyhow",
"bson",


@ -1,7 +1,7 @@
[package]
name = "kcl-lib"
description = "KittyCAD Language"
version = "0.1.20"
version = "0.1.24"
edition = "2021"
license = "MIT"
@ -11,7 +11,7 @@ license = "MIT"
anyhow = { version = "1.0.75", features = ["backtrace"] }
clap = { version = "4.4.2", features = ["cargo", "derive", "env", "unicode"] }
dashmap = "5.5.3"
derive-docs = { version = "0.1.1" }
derive-docs = { version = "0.1.3" }
#derive-docs = { path = "../derive-docs" }
kittycad = { version = "0.2.23", default-features = false, features = ["js"] }
lazy_static = "1.4.0"

src/wasm-lib/kcl/fuzz/.gitignore (vendored, 4 lines added)

@ -0,0 +1,4 @@
target
corpus
artifacts
coverage

src/wasm-lib/kcl/fuzz/Cargo.lock (generated, 2218 lines): file diff suppressed because it is too large.


@ -0,0 +1,27 @@
[package]
name = "kcl-lib-fuzz"
version = "0.0.0"
publish = false
edition = "2021"
[package.metadata]
cargo-fuzz = true
[dependencies]
libfuzzer-sys = "0.4"
[dependencies.kcl-lib]
path = ".."
# Prevent this from interfering with workspaces
[workspace]
members = ["."]
[profile.release]
debug = 1
[[bin]]
name = "parser"
path = "fuzz_targets/parser.rs"
test = false
doc = false


@ -0,0 +1,14 @@
#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
extern crate kcl_lib;
fuzz_target!(|data: &[u8]| {
if let Ok(s) = std::str::from_utf8(data) {
let tokens = kcl_lib::tokeniser::lexer(s);
let parser = kcl_lib::parser::Parser::new(tokens);
if let Ok(_) = parser.ast() {
println!("OK");
}
}
});


@ -27,12 +27,16 @@ pub struct Program {
}
impl Program {
pub fn recast(&self, indentation: &str, is_with_block: bool) -> String {
self.body
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
let indentation = options.get_indentation(indentation_level);
let result = self
.body
.iter()
.map(|statement| match statement.clone() {
BodyItem::ExpressionStatement(expression_statement) => {
expression_statement.expression.recast(indentation, false)
expression_statement
.expression
.recast(options, indentation_level, false)
}
BodyItem::VariableDeclaration(variable_declaration) => variable_declaration
.declarations
@ -43,56 +47,44 @@ impl Program {
indentation,
variable_declaration.kind,
declaration.id.name,
declaration.init.recast("", false)
declaration.init.recast(options, 0, false)
)
})
.collect::<String>(),
BodyItem::ReturnStatement(return_statement) => {
format!("{}return {}", indentation, return_statement.argument.recast("", false))
format!(
"{}return {}",
indentation,
return_statement.argument.recast(options, 0, false)
)
}
})
.enumerate()
.map(|(index, recast_str)| {
let is_legit_custom_whitespace_or_comment = |s: String| s != " " && s != "\n" && s != " " && s != "\t";
// determine the value of startString
let last_white_space_or_comment = if index > 0 {
let tmp = if let Some(non_code_node) = self.non_code_meta.none_code_nodes.get(&(index - 1)) {
non_code_node.format(indentation)
} else {
" ".to_string()
};
tmp
} else {
" ".to_string()
};
// indentation of this line will be covered by the previous if we're using a custom whitespace or comment
let mut start_string = if is_legit_custom_whitespace_or_comment(last_white_space_or_comment) {
String::new()
} else {
indentation.to_owned()
};
if index == 0 {
let start_string = if index == 0 {
// We need to indent.
if let Some(start) = self.non_code_meta.start.clone() {
start_string = start.format(indentation);
start.format(&indentation)
} else {
start_string = indentation.to_owned();
indentation.to_string()
}
}
} else {
// Do nothing, we already applied the indentation elsewhere.
String::new()
};
// determine the value of endString
let maybe_line_break: String = if index == self.body.len() - 1 && !is_with_block {
// determine the value of the end string
// basically if we are inside a nested function we want to end with a new line
let maybe_line_break: String = if index == self.body.len() - 1 && indentation_level == 0 {
String::new()
} else {
"\n".to_string()
};
let mut custom_white_space_or_comment = match self.non_code_meta.none_code_nodes.get(&index) {
Some(custom_white_space_or_comment) => custom_white_space_or_comment.format(indentation),
let custom_white_space_or_comment = match self.non_code_meta.none_code_nodes.get(&index) {
Some(custom_white_space_or_comment) => custom_white_space_or_comment.format(&indentation),
None => String::new(),
};
if !is_legit_custom_whitespace_or_comment(custom_white_space_or_comment.clone()) {
custom_white_space_or_comment = String::new();
}
let end_string = if custom_white_space_or_comment.is_empty() {
maybe_line_break
} else {
@ -103,7 +95,14 @@ impl Program {
})
.collect::<String>()
.trim()
.to_string()
.to_string();
// Insert a final new line if the user wants it.
if options.insert_final_newline {
format!("{}\n", result)
} else {
result
}
}
/// Returns the body item that includes the given character position.
@ -118,6 +117,18 @@ impl Program {
None
}
/// Returns the body item that includes the given character position.
pub fn get_mut_body_item_for_position(&mut self, pos: usize) -> Option<&mut BodyItem> {
for item in &mut self.body {
let source_range: SourceRange = item.clone().into();
if source_range.contains(pos) {
return Some(item);
}
}
None
}
/// Returns a value that includes the given character position.
/// This is a bit more recursive than `get_body_item_for_position`.
pub fn get_value_for_position(&self, pos: usize) -> Option<&Value> {
@ -150,6 +161,82 @@ impl Program {
symbols
}
/// Rename the variable declaration at the given position.
pub fn rename_symbol(&mut self, new_name: &str, pos: usize) {
// The position must be within the variable declaration.
let mut old_name = None;
for item in &mut self.body {
match item {
BodyItem::ExpressionStatement(_expression_statement) => {
continue;
}
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
if let Some(var_old_name) = variable_declaration.rename_symbol(new_name, pos) {
old_name = Some(var_old_name);
break;
}
}
BodyItem::ReturnStatement(_return_statement) => continue,
}
}
if let Some(old_name) = old_name {
// Now rename all the identifiers in the rest of the program.
self.rename_identifiers(&old_name, new_name);
} else {
// Okay so this was not a top level variable declaration.
// But it might be a variable declaration inside a function or function params.
// So we need to check that.
let Some(ref mut item) = self.get_mut_body_item_for_position(pos) else {
return;
};
// Recurse over the item.
let mut value = match item {
BodyItem::ExpressionStatement(ref mut expression_statement) => {
Some(&mut expression_statement.expression)
}
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
variable_declaration.get_mut_value_for_position(pos)
}
BodyItem::ReturnStatement(ref mut return_statement) => Some(&mut return_statement.argument),
};
// Check if we have a function expression.
if let Some(Value::FunctionExpression(ref mut function_expression)) = &mut value {
// Check if the params to the function expression contain the position.
for param in &mut function_expression.params {
let param_source_range: SourceRange = param.clone().into();
if param_source_range.contains(pos) {
let old_name = param.name.clone();
// Rename the param.
param.rename(&old_name, new_name);
// Now rename all the identifiers in the rest of the program.
function_expression.body.rename_identifiers(&old_name, new_name);
return;
}
}
}
}
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
for item in &mut self.body {
match item {
BodyItem::ExpressionStatement(ref mut expression_statement) => {
expression_statement.expression.rename_identifiers(old_name, new_name);
}
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
variable_declaration.rename_identifiers(old_name, new_name);
}
BodyItem::ReturnStatement(ref mut return_statement) => {
return_statement.argument.rename_identifiers(old_name, new_name);
}
}
}
}
}
pub trait ValueMeta {
@ -249,19 +336,18 @@ pub enum Value {
}
impl Value {
fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
let indentation = indentation.to_string() + if is_in_pipe_expression { " " } else { "" };
fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
match &self {
Value::BinaryExpression(bin_exp) => bin_exp.recast(),
Value::ArrayExpression(array_exp) => array_exp.recast(&indentation, is_in_pipe_expression),
Value::ObjectExpression(ref obj_exp) => obj_exp.recast(&indentation, is_in_pipe_expression),
Value::BinaryExpression(bin_exp) => bin_exp.recast(options),
Value::ArrayExpression(array_exp) => array_exp.recast(options, indentation_level, is_in_pipe),
Value::ObjectExpression(ref obj_exp) => obj_exp.recast(options, indentation_level, is_in_pipe),
Value::MemberExpression(mem_exp) => mem_exp.recast(),
Value::Literal(literal) => literal.recast(),
Value::FunctionExpression(func_exp) => func_exp.recast(&indentation),
Value::CallExpression(call_exp) => call_exp.recast(&indentation, is_in_pipe_expression),
Value::FunctionExpression(func_exp) => func_exp.recast(options, indentation_level),
Value::CallExpression(call_exp) => call_exp.recast(options, indentation_level, is_in_pipe),
Value::Identifier(ident) => ident.name.to_string(),
Value::PipeExpression(pipe_exp) => pipe_exp.recast(&indentation),
Value::UnaryExpression(unary_exp) => unary_exp.recast(),
Value::PipeExpression(pipe_exp) => pipe_exp.recast(options, indentation_level),
Value::UnaryExpression(unary_exp) => unary_exp.recast(options),
Value::PipeSubstitution(_) => crate::parser::PIPE_SUBSTITUTION_OPERATOR.to_string(),
}
}
@ -317,6 +403,29 @@ impl Value {
Value::UnaryExpression(unary_expression) => unary_expression.get_hover_value_for_position(pos, code),
}
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
match self {
Value::Literal(_literal) => {}
Value::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
Value::BinaryExpression(ref mut binary_expression) => {
binary_expression.rename_identifiers(old_name, new_name)
}
Value::FunctionExpression(_function_identifier) => {}
Value::CallExpression(ref mut call_expression) => call_expression.rename_identifiers(old_name, new_name),
Value::PipeExpression(ref mut pipe_expression) => pipe_expression.rename_identifiers(old_name, new_name),
Value::PipeSubstitution(_) => {}
Value::ArrayExpression(ref mut array_expression) => array_expression.rename_identifiers(old_name, new_name),
Value::ObjectExpression(ref mut object_expression) => {
object_expression.rename_identifiers(old_name, new_name)
}
Value::MemberExpression(ref mut member_expression) => {
member_expression.rename_identifiers(old_name, new_name)
}
Value::UnaryExpression(ref mut unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
}
}
}
impl From<Value> for crate::executor::SourceRange {
@ -355,13 +464,13 @@ impl From<&BinaryPart> for crate::executor::SourceRange {
}
impl BinaryPart {
fn recast(&self, indentation: &str) -> String {
fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
match &self {
BinaryPart::Literal(literal) => literal.recast(),
BinaryPart::Identifier(identifier) => identifier.name.to_string(),
BinaryPart::BinaryExpression(binary_expression) => binary_expression.recast(),
BinaryPart::CallExpression(call_expression) => call_expression.recast(indentation, false),
BinaryPart::UnaryExpression(unary_expression) => unary_expression.recast(),
BinaryPart::BinaryExpression(binary_expression) => binary_expression.recast(options),
BinaryPart::CallExpression(call_expression) => call_expression.recast(options, indentation_level, false),
BinaryPart::UnaryExpression(unary_expression) => unary_expression.recast(options),
}
}
@ -422,6 +531,23 @@ impl BinaryPart {
BinaryPart::UnaryExpression(unary_expression) => unary_expression.get_hover_value_for_position(pos, code),
}
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
match self {
BinaryPart::Literal(_literal) => {}
BinaryPart::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
BinaryPart::BinaryExpression(ref mut binary_expression) => {
binary_expression.rename_identifiers(old_name, new_name)
}
BinaryPart::CallExpression(ref mut call_expression) => {
call_expression.rename_identifiers(old_name, new_name)
}
BinaryPart::UnaryExpression(ref mut unary_expression) => {
unary_expression.rename_identifiers(old_name, new_name)
}
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
@ -436,17 +562,17 @@ pub struct NoneCodeNode {
impl NoneCodeNode {
pub fn value(&self) -> String {
match &self.value {
NoneCodeValue::Inline { value } => value.clone(),
NoneCodeValue::Block { value } => value.clone(),
NoneCodeValue::NewLineBlock { value } => value.clone(),
NoneCodeValue::InlineComment { value } => value.clone(),
NoneCodeValue::BlockComment { value } => value.clone(),
NoneCodeValue::NewLineBlockComment { value } => value.clone(),
NoneCodeValue::NewLine => "\n\n".to_string(),
}
}
pub fn format(&self, indentation: &str) -> String {
match &self.value {
NoneCodeValue::Inline { value } => format!(" // {}\n", value),
NoneCodeValue::Block { value } => {
NoneCodeValue::InlineComment { value } => format!(" // {}\n", value),
NoneCodeValue::BlockComment { value } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n" };
if value.contains('\n') {
format!("{}{}/* {} */\n", add_start_new_line, indentation, value)
@ -454,7 +580,7 @@ impl NoneCodeNode {
format!("{}{}// {}\n", add_start_new_line, indentation, value)
}
}
NoneCodeValue::NewLineBlock { value } => {
NoneCodeValue::NewLineBlockComment { value } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n\n" };
if value.contains('\n') {
format!("{}{}/* {} */\n", add_start_new_line, indentation, value)
@ -471,9 +597,29 @@ impl NoneCodeNode {
#[ts(export)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum NoneCodeValue {
Inline { value: String },
Block { value: String },
NewLineBlock { value: String },
/// An inline comment.
/// An example of this is the following: `1 + 1 // This is an inline comment`.
InlineComment {
value: String,
},
/// A block comment.
/// An example of this is the following:
/// ```python,no_run
/// /* This is a
/// block comment */
/// 1 + 1
/// ```
/// Now this is important. The block comment is attached to the next line.
/// This is always the case. Also the block comment doesnt have a new line above it.
/// If it did it would be a `NewLineBlockComment`.
BlockComment {
value: String,
},
/// A block comment that has a new line above it.
/// The user explicitly added a new line above the block comment.
NewLineBlockComment {
value: String,
},
// A new line like `\n\n` NOT a new line like `\n`.
// This is also not a comment.
NewLine,
@ -539,13 +685,13 @@ pub struct CallExpression {
impl_value_meta!(CallExpression);
impl CallExpression {
fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
format!(
"{}({})",
self.callee.name,
self.arguments
.iter()
.map(|arg| arg.recast(indentation, is_in_pipe_expression))
.map(|arg| arg.recast(options, indentation_level, is_in_pipe))
.collect::<Vec<String>>()
.join(", ")
)
@ -671,6 +817,15 @@ impl CallExpression {
None
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
self.callee.rename(old_name, new_name);
for arg in &mut self.arguments {
arg.rename_identifiers(old_name, new_name);
}
}
}
/// A function declaration.
@ -723,6 +878,50 @@ impl VariableDeclaration {
None
}
/// Returns a value that includes the given character position.
pub fn get_mut_value_for_position(&mut self, pos: usize) -> Option<&mut Value> {
for declaration in &mut self.declarations {
let source_range: SourceRange = declaration.clone().into();
if source_range.contains(pos) {
return Some(&mut declaration.init);
}
}
None
}
/// Rename the variable declaration at the given position.
/// This returns the old name of the variable, if it found one.
pub fn rename_symbol(&mut self, new_name: &str, pos: usize) -> Option<String> {
// The position must be within the variable declaration.
let source_range: SourceRange = self.clone().into();
if !source_range.contains(pos) {
return None;
}
for declaration in &mut self.declarations {
let declaration_source_range: SourceRange = declaration.id.clone().into();
if declaration_source_range.contains(pos) {
let old_name = declaration.id.name.clone();
declaration.id.name = new_name.to_string();
return Some(old_name);
}
}
None
}
pub fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
for declaration in &mut self.declarations {
// Skip the init for the variable with the new name since it is the one we are renaming.
if declaration.id.name == new_name {
continue;
}
declaration.init.rename_identifiers(old_name, new_name);
}
}
pub fn get_lsp_symbols(&self, code: &str) -> Vec<DocumentSymbol> {
let mut symbols = vec![];
@ -839,7 +1038,9 @@ impl VariableKind {
pub struct VariableDeclarator {
pub start: usize,
pub end: usize,
/// The identifier of the variable.
pub id: Identifier,
/// The value of the variable.
pub init: Value,
}
@ -901,6 +1102,15 @@ pub struct Identifier {
impl_value_meta!(Identifier);
impl Identifier {
/// Rename all identifiers that have the old name to the new given name.
fn rename(&mut self, old_name: &str, new_name: &str) {
if self.name == old_name {
self.name = new_name.to_string();
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(tag = "type")]
@ -923,27 +1133,35 @@ pub struct ArrayExpression {
impl_value_meta!(ArrayExpression);
impl ArrayExpression {
fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
let flat_recast = format!(
"[{}]",
self.elements
.iter()
.map(|el| el.recast("", false))
.map(|el| el.recast(options, 0, false))
.collect::<Vec<String>>()
.join(", ")
);
let max_array_length = 40;
if flat_recast.len() > max_array_length {
let indentation = indentation.to_string() + " ";
let inner_indentation = if is_in_pipe {
options.get_indentation_offset_pipe(indentation_level + 1)
} else {
options.get_indentation(indentation_level + 1)
};
format!(
"[\n{}{}\n{}]",
indentation,
inner_indentation,
self.elements
.iter()
.map(|el| el.recast(&indentation, false))
.map(|el| el.recast(options, indentation_level, false))
.collect::<Vec<String>>()
.join(format!(",\n{}", indentation).as_str()),
if is_in_pipe_expression { " " } else { "" }
.join(format!(",\n{}", inner_indentation).as_str()),
if is_in_pipe {
options.get_indentation_offset_pipe(indentation_level)
} else {
options.get_indentation(indentation_level)
},
)
} else {
flat_recast
@ -1019,6 +1237,13 @@ impl ArrayExpression {
}],
})
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
for element in &mut self.elements {
element.rename_identifiers(old_name, new_name);
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
@ -1031,27 +1256,35 @@ pub struct ObjectExpression {
}
impl ObjectExpression {
fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
let flat_recast = format!(
"{{ {} }}",
self.properties
.iter()
.map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast("", false)) })
.map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast(options, 0, false)) })
.collect::<Vec<String>>()
.join(", ")
);
let max_array_length = 40;
if flat_recast.len() > max_array_length {
let indentation = indentation.to_owned() + " ";
let inner_indentation = if is_in_pipe {
options.get_indentation_offset_pipe(indentation_level + 1)
} else {
options.get_indentation(indentation_level + 1)
};
format!(
"{{\n{}{}\n{}}}",
indentation,
inner_indentation,
self.properties
.iter()
.map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast("", is_in_pipe_expression)) })
.map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast(options, 0, false)) })
.collect::<Vec<String>>()
.join(format!(",\n{}", indentation).as_str()),
if is_in_pipe_expression { " " } else { "" }
.join(format!(",\n{}", inner_indentation).as_str()),
if is_in_pipe {
options.get_indentation_offset_pipe(indentation_level)
} else {
options.get_indentation(indentation_level)
},
)
} else {
flat_recast
@ -1125,6 +1358,13 @@ impl ObjectExpression {
}],
})
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
for property in &mut self.properties {
property.value.rename_identifiers(old_name, new_name);
}
}
}
impl_value_meta!(ObjectExpression);
@ -1342,6 +1582,21 @@ impl MemberExpression {
}))
}
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
match &mut self.object {
MemberObject::MemberExpression(ref mut member_expression) => {
member_expression.rename_identifiers(old_name, new_name)
}
MemberObject::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
}
match &mut self.property {
LiteralIdentifier::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
LiteralIdentifier::Literal(_) => {}
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
@ -1370,7 +1625,7 @@ impl BinaryExpression {
self.operator.precedence()
}
fn recast(&self) -> String {
fn recast(&self, options: &FormatOptions) -> String {
let maybe_wrap_it = |a: String, doit: bool| -> String {
if doit {
format!("({})", a)
@ -1393,9 +1648,9 @@ impl BinaryExpression {
format!(
"{} {} {}",
maybe_wrap_it(self.left.recast(""), should_wrap_left),
maybe_wrap_it(self.left.recast(options, 0), should_wrap_left),
self.operator,
maybe_wrap_it(self.right.recast(""), should_wrap_right)
maybe_wrap_it(self.right.recast(options, 0), should_wrap_right)
)
}
@ -1458,6 +1713,12 @@ impl BinaryExpression {
}],
})
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
self.left.rename_identifiers(old_name, new_name);
self.right.rename_identifiers(old_name, new_name);
}
}
pub fn parse_json_number_as_f64(j: &serde_json::Value, source_range: SourceRange) -> Result<f64, KclError> {
@ -1532,8 +1793,8 @@ pub struct UnaryExpression {
impl_value_meta!(UnaryExpression);
impl UnaryExpression {
fn recast(&self) -> String {
format!("{}{}", &self.operator, self.argument.recast(""))
fn recast(&self, options: &FormatOptions) -> String {
format!("{}{}", &self.operator, self.argument.recast(options, 0))
}
pub fn get_result(
@ -1565,6 +1826,11 @@ impl UnaryExpression {
None
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
self.argument.rename_identifiers(old_name, new_name);
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, FromStr, Display)]
@ -1595,13 +1861,13 @@ pub struct PipeExpression {
impl_value_meta!(PipeExpression);
impl PipeExpression {
fn recast(&self, indentation: &str) -> String {
fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
self.body
.iter()
.enumerate()
.map(|(index, statement)| {
let indentation = indentation.to_string() + " ";
let mut s = statement.recast(&indentation, true);
let indentation = options.get_indentation(indentation_level + 1);
let mut s = statement.recast(options, indentation_level + 1, true);
let non_code_meta = self.non_code_meta.clone();
if let Some(non_code_meta_value) = non_code_meta.none_code_nodes.get(&index) {
s += non_code_meta_value.format(&indentation).trim_end_matches('\n')
@ -1641,6 +1907,13 @@ impl PipeExpression {
pipe_info.index = 0;
execute_pipe_body(memory, &self.body, pipe_info, self.into(), engine)
}
/// Rename all identifiers that have the old name to the new given name.
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
for statement in &mut self.body {
statement.rename_identifiers(old_name, new_name);
}
}
}
fn execute_pipe_body(
@ -1706,17 +1979,16 @@ pub struct FunctionExpression {
impl_value_meta!(FunctionExpression);
impl FunctionExpression {
pub fn recast(&self, indentation: &str) -> String {
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
format!(
"({}) => {{\n{}{}{}\n}}",
"({}) => {{\n{}{}\n}}",
self.params
.iter()
.map(|param| param.name.clone())
.collect::<Vec<String>>()
.join(", "),
indentation,
" ",
self.body.recast(" ", true)
options.get_indentation(indentation_level + 1),
self.body.recast(options, indentation_level + 1)
)
}
@ -1756,6 +2028,58 @@ pub enum Hover {
},
}
/// Format options.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct FormatOptions {
/// Size of a tab in spaces.
pub tab_size: usize,
/// Prefer tabs over spaces.
pub use_tabs: bool,
/// How to handle the final newline in the file.
/// If true, ensure file ends with a newline.
/// If false, ensure file does not end with a newline.
pub insert_final_newline: bool,
}
impl Default for FormatOptions {
fn default() -> Self {
Self::new()
}
}
impl FormatOptions {
/// Define the default format options.
/// We use 2 spaces for indentation.
pub fn new() -> Self {
Self {
tab_size: 2,
use_tabs: false,
insert_final_newline: false,
}
}
/// Get the indentation string for the given level.
pub fn get_indentation(&self, level: usize) -> String {
if self.use_tabs {
"\t".repeat(level)
} else {
" ".repeat(level * self.tab_size)
}
}
/// Get the indentation string for the given level.
/// But offset the pipe operator (and a space) by one level.
pub fn get_indentation_offset_pipe(&self, level: usize) -> String {
if self.use_tabs {
"\t".repeat(level + 1)
} else {
" ".repeat(level * self.tab_size) + " ".repeat(PIPE_OPERATOR.len() + 1).as_str()
}
}
}
#[cfg(test)]
mod tests {
use super::*;
@ -1797,7 +2121,7 @@ show(part001)"#;
let some_program: crate::abstract_syntax_tree_types::Program =
serde_json::from_str(some_program_string).unwrap();
let recasted = some_program.recast("", false);
let recasted = some_program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"const part001 = startSketchAt('default')
@ -1816,7 +2140,7 @@ show(part001)"#
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"const part001 = startSketchAt([0.0, 5.0])
@ -1834,7 +2158,7 @@ show(part001)"#
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"const part001 = startSketchAt([0.0, 5.0])
@ -1852,7 +2176,7 @@ show(part001)"#
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"const part001 = startSketchAt([0.0, 5.0])
@ -1877,7 +2201,7 @@ show(part001)"#
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"const myFn = () => {
@ -1913,7 +2237,7 @@ const mySk1 = startSketchAt([0, 0])
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"// comment at start
@ -1940,9 +2264,9 @@ a comment between pipe expression statements */
|> line({ to: [0.62, 4.15], tag: 'seg01' }, %)
|> line([2.77, -1.24], %)
|> angledLineThatIntersects({
angle: 201,
offset: -1.35,
intersectTag: 'seg01'
angle: 201,
offset: -1.35,
intersectTag: 'seg01'
}, %)
|> line([-0.42, -1.72], %)
@ -1951,7 +2275,7 @@ show(part001)"#;
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
}
@ -1964,12 +2288,19 @@ const yo = {
anum: 2,
identifier: three,
binExp: 4 + 5
}"#;
}
const yo = [
1,
" 2,",
"three",
4 + 5,
" hey oooooo really long long long"
]"#;
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
}
@ -1987,7 +2318,7 @@ const things = "things"
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string.trim());
}
@ -2005,7 +2336,125 @@ const things = "things"
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast("", false);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string.trim());
}
#[test]
fn test_recast_array_new_line_in_pipe() {
let some_program_string = r#"const myVar = 3
const myVar2 = 5
const myVar3 = 6
const myAng = 40
const myAng2 = 134
const part001 = startSketchAt([0, 0])
|> line({ to: [1, 3.82], tag: 'seg01' }, %) // ln-should-get-tag
|> angledLineToX([
-angleToMatchLengthX('seg01', myVar, %),
myVar
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|> angledLineToY([
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
}
#[test]
fn test_recast_array_new_line_in_pipe_custom() {
let some_program_string = r#"const myVar = 3
const myVar2 = 5
const myVar3 = 6
const myAng = 40
const myAng2 = 134
const part001 = startSketchAt([0, 0])
|> line({ to: [1, 3.82], tag: 'seg01' }, %) // ln-should-get-tag
|> angledLineToX([
-angleToMatchLengthX('seg01', myVar, %),
myVar
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|> angledLineToY([
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
"#;
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(
&FormatOptions {
tab_size: 3,
use_tabs: false,
insert_final_newline: true,
},
0,
);
assert_eq!(recasted, some_program_string);
}
#[test]
fn test_recast_after_rename_std() {
let some_program_string = r#"const part001 = startSketchAt([0.0000000000, 5.0000000000])
|> line([0.4900857016, -0.0240763666], %)
const part002 = "part002"
const things = [part001, 0.0]
let blah = 1
const foo = false
let baz = {a: 1, part001: "thing"}
fn ghi = (part001) => {
return part001
}
show(part001)"#;
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
program.rename_symbol("mySuperCoolPart", 6);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"const mySuperCoolPart = startSketchAt([0.0, 5.0])
|> line([0.4900857016, -0.0240763666], %)
const part002 = "part002"
const things = [mySuperCoolPart, 0.0]
let blah = 1
const foo = false
let baz = { a: 1, part001: "thing" }
fn ghi = (part001) => {
return part001
}
show(mySuperCoolPart)"#
);
}
#[test]
fn test_recast_after_rename_fn_args() {
let some_program_string = r#"fn ghi = (x, y, z) => {
return x
}"#;
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
program.rename_symbol("newName", 10);
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"fn ghi = (newName, y, z) => {
return newName
}"#
);
}
}

View File

@ -642,7 +642,7 @@ pub fn execute(
for (index, param) in function_expression.params.iter().enumerate() {
fn_memory.add(
&param.name,
args.clone().get(index).unwrap().clone(),
args.get(index).unwrap().clone(),
param.into(),
)?;
}

View File

@ -228,8 +228,8 @@ impl ReversePolishNotation {
.collect::<Vec<Token>>(),
);
return rpn.parse();
} else if current_token.value == ")" {
if !self.operators.is_empty() && self.operators[self.operators.len() - 1].value != "(" {
} else if current_token.value == ")" && !self.operators.is_empty() {
if self.operators[self.operators.len() - 1].value != "(" {
// pop operators off the stack and push them to postFix until we find the matching '('
let rpn = ReversePolishNotation::new(
&self.parser.tokens,
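The added !self.operators.is_empty() check guards against a stray ")" arriving with an empty operator stack, which fuzz inputs such as "z%)" (see the new test below) can produce. A generic, self-contained sketch of that guard in a shunting-yard-style pass follows; it is not the repo's ReversePolishNotation type, and operator precedence plus unmatched-"(" detection are deliberately omitted.
// Hedged sketch of the ")" handling only.
fn to_rpn(tokens: &[&str]) -> Result<Vec<String>, String> {
    let mut output: Vec<String> = Vec::new();
    let mut operators: Vec<String> = Vec::new();
    for &tok in tokens {
        match tok {
            "(" => operators.push(tok.to_string()),
            ")" => {
                // Without this guard, a lone ")" would pop from an empty stack.
                if operators.is_empty() {
                    return Err("unexpected ')'".to_string());
                }
                while let Some(op) = operators.pop() {
                    if op == "(" {
                        break;
                    }
                    output.push(op);
                }
            }
            "+" | "-" | "*" | "/" => operators.push(tok.to_string()),
            _ => output.push(tok.to_string()),
        }
    }
    // Flush any remaining operators.
    output.extend(operators.into_iter().rev());
    Ok(output)
}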

View File

@ -336,17 +336,26 @@ impl Parser {
value: if start_end_string.starts_with("\n\n") && is_new_line_comment {
// Preserve a blank line before the comment if the user wrote one,
// but allow at most one.
NoneCodeValue::NewLineBlock { value: full_string }
NoneCodeValue::NewLineBlockComment { value: full_string }
} else if is_new_line_comment {
NoneCodeValue::Block { value: full_string }
NoneCodeValue::BlockComment { value: full_string }
} else {
NoneCodeValue::Inline { value: full_string }
NoneCodeValue::InlineComment { value: full_string }
},
};
Ok((Some(node), end_index - 1))
}
fn next_meaningful_token(&self, index: usize, offset: Option<usize>) -> Result<TokenReturnWithNonCode, KclError> {
// There is no next meaningful token.
if index >= self.tokens.len() - 1 {
return Ok(TokenReturnWithNonCode {
token: None,
index: self.tokens.len() - 1,
non_code_node: None,
});
}
let new_index = index + offset.unwrap_or(1);
let Ok(token) = self.get_token(new_index) else {
return Ok(TokenReturnWithNonCode {
@ -405,7 +414,7 @@ impl Parser {
if found_another_opening_brace {
return self.find_closing_brace(index + 1, brace_count + 1, search_opening_brace);
}
if found_another_closing_brace {
if found_another_closing_brace && brace_count > 0 {
return self.find_closing_brace(index + 1, brace_count - 1, search_opening_brace);
}
// non-brace token, increment and continue
@ -610,6 +619,12 @@ impl Parser {
fn make_member_expression(&self, index: usize) -> Result<MemberExpressionReturn, KclError> {
let current_token = self.get_token(index)?;
let mut keys_info = self.collect_object_keys(index, None)?;
if keys_info.is_empty() {
return Err(KclError::Syntax(KclErrorDetails {
source_ranges: vec![current_token.into()],
message: "expected to be started on a identifier or literal".to_string(),
}));
}
let last_key = keys_info[keys_info.len() - 1].clone();
let first_key = keys_info.remove(0);
let root = self.make_identifier(index)?;
@ -679,7 +694,11 @@ impl Parser {
return Ok(index);
}
let next_right = self.next_meaningful_token(maybe_operator.index, None)?;
self.find_end_of_binary_expression(next_right.index)
if next_right.index != index {
self.find_end_of_binary_expression(next_right.index)
} else {
Ok(index)
}
} else {
Ok(index)
}
@ -847,6 +866,8 @@ impl Parser {
fn make_array_expression(&self, index: usize) -> Result<ArrayReturn, KclError> {
let opening_brace_token = self.get_token(index)?;
let first_element_token = self.next_meaningful_token(index, None)?;
// Make sure there is a closing brace.
let _closing_brace = self.find_closing_brace(index, 0, "")?;
let array_elements = self.make_array_elements(first_element_token.index, Vec::new())?;
Ok(ArrayReturn {
expression: ArrayExpression {
@ -1018,7 +1039,7 @@ impl Parser {
} else {
return Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![argument_token_token.clone().into()],
message: format!("Unexpected token {} ", argument_token_token.value),
message: format!("Unexpected token {}", argument_token_token.value),
}));
};
}
@ -1043,18 +1064,18 @@ impl Parser {
Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![argument_token_token.clone().into()],
message: format!("Unexpected token {} ", argument_token_token.value),
message: format!("Unexpected token {}", argument_token_token.value),
}))
} else {
Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![brace_or_comma_token.into()],
message: format!("Unexpected token {} ", brace_or_comma_token.value),
message: format!("Unexpected token {}", brace_or_comma_token.value),
}))
}
} else {
Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![brace_or_comma_token.into()],
message: format!("Unexpected token {} ", brace_or_comma_token.value),
message: format!("Unexpected token {}", brace_or_comma_token.value),
}))
}
}
@ -1063,6 +1084,8 @@ impl Parser {
let current_token = self.get_token(index)?;
let brace_token = self.next_meaningful_token(index, None)?;
let callee = self.make_identifier(index)?;
// Make sure there is a closing brace.
let _closing_brace_token = self.find_closing_brace(brace_token.index, 0, "")?;
let args = self.make_arguments(brace_token.index, vec![])?;
let closing_brace_token = self.get_token(args.last_index)?;
let function = if let Some(stdlib_fn) = self.stdlib.get(&callee.name) {
@ -1105,42 +1128,42 @@ impl Parser {
) -> Result<VariableDeclaratorsReturn, KclError> {
let current_token = self.get_token(index)?;
let assignment = self.next_meaningful_token(index, None)?;
if let Some(assignment_token) = assignment.token {
let contents_start_token = self.next_meaningful_token(assignment.index, None)?;
let pipe_start_index = if assignment_token.token_type == TokenType::Operator {
contents_start_token.index
} else {
assignment.index
};
let next_pipe_operator = self.has_pipe_operator(pipe_start_index, None)?;
let init: Value;
let last_index = if next_pipe_operator.token.is_some() {
let pipe_expression_result = self.make_pipe_expression(assignment.index)?;
init = Value::PipeExpression(Box::new(pipe_expression_result.expression));
pipe_expression_result.last_index
} else {
let value_result = self.make_value(contents_start_token.index)?;
init = value_result.value;
value_result.last_index
};
let current_declarator = VariableDeclarator {
start: current_token.start,
end: self.get_token(last_index)?.end,
id: self.make_identifier(index)?,
init,
};
let mut declarations = previous_declarators;
declarations.push(current_declarator);
Ok(VariableDeclaratorsReturn {
declarations,
last_index,
})
} else {
Err(KclError::Unimplemented(KclErrorDetails {
let Some(assignment_token) = assignment.token else {
return Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![current_token.clone().into()],
message: format!("Unexpected token {} ", current_token.value),
}))
}
message: format!("Unexpected token {}", current_token.value),
}));
};
let contents_start_token = self.next_meaningful_token(assignment.index, None)?;
let pipe_start_index = if assignment_token.token_type == TokenType::Operator {
contents_start_token.index
} else {
assignment.index
};
let next_pipe_operator = self.has_pipe_operator(pipe_start_index, None)?;
let init: Value;
let last_index = if next_pipe_operator.token.is_some() {
let pipe_expression_result = self.make_pipe_expression(assignment.index)?;
init = Value::PipeExpression(Box::new(pipe_expression_result.expression));
pipe_expression_result.last_index
} else {
let value_result = self.make_value(contents_start_token.index)?;
init = value_result.value;
value_result.last_index
};
let current_declarator = VariableDeclarator {
start: current_token.start,
end: self.get_token(last_index)?.end,
id: self.make_identifier(index)?,
init,
};
let mut declarations = previous_declarators;
declarations.push(current_declarator);
Ok(VariableDeclaratorsReturn {
declarations,
last_index,
})
}
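This hunk (like next_meaningful_token, the tokeniser, and the LSP handlers elsewhere in the changeset) rewrites a nested if let / else into the let ... else early-return form, keeping the happy path unindented. A tiny generic illustration of the idiom, stable since Rust 1.65 and unrelated to the parser types:
// Generic let-else sketch: bind the Some case or bail out early.
fn first_even(nums: &[i64]) -> Result<i64, String> {
    let Some(n) = nums.iter().copied().find(|n| n % 2 == 0) else {
        return Err("no even number found".to_string());
    };
    Ok(n)
}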
fn make_variable_declaration(&self, index: usize) -> Result<VariableDeclarationResult, KclError> {
@ -1184,7 +1207,7 @@ impl Parser {
} else {
Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![brace_or_comma_token.into()],
message: format!("Unexpected token {} ", brace_or_comma_token.value),
message: format!("Unexpected token {}", brace_or_comma_token.value),
}))
}
}
@ -1192,6 +1215,12 @@ impl Parser {
fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
let current_token = self.get_token(index)?;
let next_token = self.next_meaningful_token(index, None)?;
if next_token.token.is_none() {
return Err(KclError::Syntax(KclErrorDetails {
source_ranges: vec![current_token.into()],
message: "expected another token".to_string(),
}));
}
let argument = self.make_value(next_token.index)?;
let argument_token = self.get_token(argument.last_index)?;
Ok(UnaryExpressionResult {
@ -1232,7 +1261,6 @@ impl Parser {
return Ok(ExpressionStatementResult {
expression: ExpressionStatement {
start: current_token.start,
// end: call_expression.last_index,
end,
expression: Value::CallExpression(Box::new(call_expression.expression)),
},
@ -1314,6 +1342,8 @@ impl Parser {
fn make_object_expression(&self, index: usize) -> Result<ObjectExpressionResult, KclError> {
let opening_brace_token = self.get_token(index)?;
// Make sure there is a closing brace.
let _closing_brace = self.find_closing_brace(index, 0, "")?;
let first_property_token = self.next_meaningful_token(index, None)?;
let object_properties = self.make_object_properties(first_property_token.index, vec![])?;
Ok(ObjectExpressionResult {
@ -1665,7 +1695,7 @@ const key = 'c'"#,
Some(NoneCodeNode {
start: 38,
end: 60,
value: NoneCodeValue::Block {
value: NoneCodeValue::BlockComment {
value: "this is a comment".to_string(),
},
}),
@ -1687,7 +1717,7 @@ const key = 'c'"#,
Some(NoneCodeNode {
start: 106,
end: 166,
value: NoneCodeValue::Block {
value: NoneCodeValue::BlockComment {
value: "this is\n a comment\n spanning a few lines".to_string(),
},
}),
@ -2716,4 +2746,139 @@ show(mySk1)"#;
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("file is empty"));
}
#[test]
fn test_parse_half_pipe_small() {
let tokens = crate::tokeniser::lexer(
"const secondExtrude = startSketchAt([0,0])
|",
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_half_pipe() {
let tokens = crate::tokeniser::lexer(
"const height = 10
const firstExtrude = startSketchAt([0,0])
|> line([0, 8], %)
|> line([20, 0], %)
|> line([0, -8], %)
|> close(%)
|> extrude(2, %)
show(firstExtrude)
const secondExtrude = startSketchAt([0,0])
|",
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_greater_bang() {
let tokens = crate::tokeniser::lexer(">!");
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_ok());
}
#[test]
fn test_parse_z_percent_parens() {
let tokens = crate::tokeniser::lexer("z%)");
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
}
#[test]
fn test_parse_parens_unicode() {
let tokens = crate::tokeniser::lexer("");
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_ok());
}
#[test]
fn test_parse_nested_open_brackets() {
let tokens = crate::tokeniser::lexer(
r#"
z(-[["#,
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("unexpected end"));
}
#[test]
fn test_parse_weird_new_line_function() {
let tokens = crate::tokeniser::lexer(
r#"z
(--#"#,
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("unexpected end"));
}
#[test]
fn test_parse_weird_lots_of_fancy_brackets() {
let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("unexpected end"));
}
#[test]
fn test_parse_weird_close_before_open() {
let tokens = crate::tokeniser::lexer(
r#"fn)n
e
["#,
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result
.err()
.unwrap()
.to_string()
.contains("expected to be started on a identifier or literal"));
}
#[test]
fn test_parse_weird_close_before_nada() {
let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("expected another token"));
}
#[test]
fn test_parse_weird_lots_of_slashes() {
let tokens = crate::tokeniser::lexer(
r#"J///////////o//+///////////P++++*++++++P///////˟
++4"#,
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result
.err()
.unwrap()
.to_string()
.contains("unexpected end of expression"));
}
}
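The test_parse_* cases above are regression tests for fuzzer-discovered inputs: any input must yield either an AST or a KclError, never a panic. Below is a hedged sketch of what a libfuzzer-style target for that invariant could look like; the path fuzz/fuzz_targets/parser.rs, the libfuzzer-sys dependency, and the visibility of the tokeniser and parser modules from a fuzz crate are assumptions, and the repository's actual target may differ.
// Hypothetical fuzz/fuzz_targets/parser.rs
#![no_main]
use libfuzzer_sys::fuzz_target;

fuzz_target!(|data: &[u8]| {
    if let Ok(code) = std::str::from_utf8(data) {
        // Returning an error is fine; a panic is the bug the fuzzer should surface.
        let tokens = kcl_lib::tokeniser::lexer(code);
        let parser = kcl_lib::parser::Parser::new(tokens);
        let _ = parser.ast();
    }
});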

View File

@ -233,6 +233,7 @@ impl LanguageServer for Backend {
document_symbol_provider: Some(OneOf::Left(true)),
hover_provider: Some(HoverProviderCapability::Simple(true)),
inlay_hint_provider: Some(OneOf::Left(true)),
rename_provider: Some(OneOf::Left(true)),
semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
SemanticTokensRegistrationOptions {
text_document_registration_options: {
@ -552,19 +553,14 @@ impl LanguageServer for Backend {
return Ok(None);
};
// Now recast it.
// Make spaces for the tab size.
/*let mut tab_size = String::new();
for _ in 0..params.options.tab_size {
tab_size.push(' ');
}*/
// TODO: use the tab size.
let mut recast = ast.recast("", false).trim().to_string();
if let Some(insert_final_newline) = params.options.insert_final_newline {
if insert_final_newline {
recast.push('\n');
}
}
let recast = ast.recast(
&crate::abstract_syntax_tree_types::FormatOptions {
tab_size: params.options.tab_size as usize,
insert_final_newline: params.options.insert_final_newline.unwrap_or(false),
use_tabs: !params.options.insert_spaces,
},
0,
);
let source_range = SourceRange([0, current_code.len() - 1]);
let range = source_range.to_lsp_range(&current_code);
Ok(Some(vec![TextEdit {
@ -572,6 +568,43 @@ impl LanguageServer for Backend {
range,
}]))
}
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
let filename = params.text_document_position.text_document.uri.to_string();
let Some(current_code) = self.current_code_map.get(&filename) else {
return Ok(None);
};
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let tokens = crate::tokeniser::lexer(&current_code);
let parser = crate::parser::Parser::new(tokens);
let Ok(mut ast) = parser.ast() else {
return Ok(None);
};
// Let's convert the position to a character index.
let pos = position_to_char_index(params.text_document_position.position, &current_code);
// Now let's perform the rename on the ast.
ast.rename_symbol(&params.new_name, pos);
// Now recast it.
let recast = ast.recast(&Default::default(), 0);
let source_range = SourceRange([0, current_code.len() - 1]);
let range = source_range.to_lsp_range(&current_code);
Ok(Some(WorkspaceEdit {
changes: Some(HashMap::from([(
params.text_document_position.text_document.uri,
vec![TextEdit {
new_text: recast,
range,
}],
)])),
document_changes: None,
change_annotations: None,
}))
}
}
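The rename handler leans on position_to_char_index, whose body is not part of this diff. One plausible shape for it, assuming tower-lsp's zero-based Position and glossing over the UTF-16 vs. char-offset distinction in the LSP spec:
// Hedged sketch only; the real helper may count offsets differently.
use tower_lsp::lsp_types::Position;

fn position_to_char_index(position: Position, code: &str) -> usize {
    let mut index = 0usize;
    for (line_number, line) in code.lines().enumerate() {
        if line_number == position.line as usize {
            return index + position.character as usize;
        }
        // +1 for the newline that lines() strips.
        index += line.chars().count() + 1;
    }
    index
}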
/// Get completions from our stdlib.

View File

@ -206,8 +206,8 @@ fn is_block_comment(character: &str) -> bool {
BLOCKCOMMENT.is_match(character)
}
fn match_first(str: &str, regex: &Regex) -> Option<String> {
regex.find(str).map(|the_match| the_match.as_str().to_string())
fn match_first(s: &str, regex: &Regex) -> Option<String> {
regex.find(s).map(|the_match| the_match.as_str().to_string())
}
fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
@ -219,8 +219,8 @@ fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
}
}
fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
let str_from_index = &str[start_index..];
fn return_token_at_index(s: &str, start_index: usize) -> Option<Token> {
let str_from_index = &s.chars().skip(start_index).collect::<String>();
if is_string(str_from_index) {
return Some(make_token(
TokenType::String,
@ -348,21 +348,22 @@ fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
None
}
pub fn lexer(str: &str) -> Vec<Token> {
fn recursively_tokenise(str: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
if current_index >= str.len() {
return previous_tokens;
}
let token = return_token_at_index(str, current_index);
let Some(token) = token else {
return recursively_tokenise(str, current_index + 1, previous_tokens);
};
let mut new_tokens = previous_tokens;
let token_length = token.value.len();
new_tokens.push(token);
recursively_tokenise(str, current_index + token_length, new_tokens)
fn recursively_tokenise(s: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
if current_index >= s.len() {
return previous_tokens;
}
recursively_tokenise(str, 0, Vec::new())
let token = return_token_at_index(s, current_index);
let Some(token) = token else {
return recursively_tokenise(s, current_index + 1, previous_tokens);
};
let mut new_tokens = previous_tokens;
let token_length = token.value.len();
new_tokens.push(token);
recursively_tokenise(s, current_index + token_length, new_tokens)
}
pub fn lexer(s: &str) -> Vec<Token> {
recursively_tokenise(s, 0, Vec::new())
}
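Renaming str to s is cosmetic; the substantive change is taking the tail via s.chars().skip(start_index) instead of &str[start_index..]. Byte-slicing a &str panics whenever the index lands inside a multi-byte character, so the char-based form is the safer choice for the unicode inputs the fuzzer feeds in. A small self-contained illustration (not code from the tokeniser):
// Hedged sketch of the indexing difference.
fn tail_by_chars(s: &str, start_index: usize) -> String {
    s.chars().skip(start_index).collect()
}

fn main() {
    let s = "a˟b"; // '˟' (U+02DF) takes two bytes in UTF-8
    // let _ = &s[2..]; // would panic: byte 2 is not a char boundary
    assert_eq!(tail_by_chars(s, 2), "b"); // char index 2 is safe
}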
#[cfg(test)]

View File

@ -76,7 +76,8 @@ pub fn recast_wasm(json_str: &str) -> Result<JsValue, JsError> {
let program: kcl_lib::abstract_syntax_tree_types::Program =
serde_json::from_str(json_str).map_err(JsError::from)?;
let result = program.recast("", false);
// Use the default options until we integrate into the UI the ability to change them.
let result = program.recast(&Default::default(), 0);
Ok(JsValue::from_serde(&result)?)
}

View File

@ -1530,10 +1530,10 @@
resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==
"@kittycad/lib@^0.0.35":
version "0.0.35"
resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.35.tgz#bde8868048f9fd53f8309e7308aeba622898b935"
integrity sha512-qM8AyP2QUlDfPWNxb1Fs/Pq9AebGVDN1OHjByxbGomKCy0jFdN2TsyDdhQH/CAZGfBCgPEfr5bq6rkUBGSXcNw==
"@kittycad/lib@^0.0.36":
version "0.0.36"
resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.36.tgz#7b9676c975bc629f227d41897b38e7d73280db71"
integrity sha512-4bVXTaIzpSRuJAuLbAD/CWWTns7H/IxogPj0827n8mwXDkj+65EBCNXhJGWRkMG2CeTVJVk1LSWKlaHE+ToxGA==
dependencies:
node-fetch "3.3.2"
openapi-types "^12.0.0"