Compare commits

14 Commits:

3da6fc3b7e
34dd15ead7
b3d441e9d6
4b3dc3756c
10027b98b5
da17dad63b
fba6c422a8
0b4b93932d
f42900ec46
eeca624ba6
84d08bad16
1181f33e9d
797e200d08
d2f231066b
.github/workflows/ci.yml (vendored, 57 changed lines)

@@ -13,17 +13,31 @@ jobs:
   check-format:
     runs-on: 'ubuntu-20.04'
     steps:

       - uses: actions/checkout@v3

       - uses: actions/setup-node@v3
         with:
           node-version-file: '.nvmrc'
+          cache: 'yarn'
       - run: yarn install

       - run: yarn fmt-check

+
+  check-types:
+    runs-on: ubuntu-20.04
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3
+        with:
+          node-version-file: '.nvmrc'
+          cache: 'yarn'
+      - run: yarn install
+      - uses: Swatinem/rust-cache@v2
+        with:
+          workspaces: "./src/wasm-lib"
+
+      - run: yarn build:wasm
+      - run: yarn tsc
+
+
   build-test-web:
     runs-on: ubuntu-20.04
@@ -36,12 +50,15 @@ jobs:
       - uses: actions/setup-node@v3
         with:
           node-version-file: '.nvmrc'
+          cache: 'yarn'

       - run: yarn install
+
-      - run: yarn build:wasm
+      - uses: Swatinem/rust-cache@v2
+        with:
+          workspaces: "./src/wasm-lib"
+
-      - run: yarn tsc
+      - run: yarn build:wasm

       - run: yarn simpleserver:ci

@@ -49,14 +66,12 @@ jobs:

       - run: yarn test:cov

-      - run: yarn test:rust
-
       - id: export_version
         run: echo "version=`cat package.json | jq -r '.version'`" >> "$GITHUB_OUTPUT"


   build-apps:
-    needs: [check-format, build-test-web]
+    needs: [check-format, build-test-web, check-types]
     runs-on: ${{ matrix.os }}
    strategy:
      matrix:
@@ -87,6 +102,10 @@ jobs:
         with:
           workspaces: './src-tauri -> target'

+      - uses: Swatinem/rust-cache@v2
+        with:
+          workspaces: "./src/wasm-lib"
+
       - name: wasm prep
         shell: bash
         run: |
@@ -110,15 +129,22 @@ jobs:
       - name: Fix format
         run: yarn fmt

+      - name: install apple silicon target mac
+        if: matrix.os == 'macos-latest'
+        run: |
+          rustup target add aarch64-apple-darwin
+
       - name: Build the app for the current platform (no upload)
         uses: tauri-apps/tauri-action@v0
         env:
           TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
           TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
+        with:
+          args: ${{ matrix.os == 'macos-latest' && '--target universal-apple-darwin' || '' }}
+
       - uses: actions/upload-artifact@v3
         with:
-          path: src-tauri/target/release/bundle/*/*
+          path: ${{ matrix.os == 'macos-latest' && 'src-tauri/target/universal-apple-darwin/release/bundle/*/*' || 'src-tauri/target/release/bundle/*/*' }}


   publish-apps-release:
@@ -133,8 +159,7 @@ jobs:

       - name: Generate the update static endpoint
         run: |
-          ls -l artifact
-          ls -l artifact/*
+          ls -l artifact/*/*itty*
           DARWIN_SIG=`cat artifact/macos/*.app.tar.gz.sig`
           LINUX_SIG=`cat artifact/appimage/*.AppImage.tar.gz.sig`
           WINDOWS_SIG=`cat artifact/nsis/*.nsis.zip.sig`
@@ -154,6 +179,10 @@ jobs:
              "signature": $darwin_sig,
              "url": $darwin_url
            },
+            "darwin-aarch64": {
+              "signature": $darwin_sig,
+              "url": $darwin_url
+            },
            "linux-x86_64": {
              "signature": $linux_sig,
              "url": $linux_url
@@ -180,7 +209,7 @@ jobs:
         uses: google-github-actions/upload-cloud-storage@v1.0.3
         with:
           path: artifact
-          glob: '*/*'
+          glob: '*/*itty*'
           parent: false
           destination: dl.kittycad.io/releases/modeling-app/v${{ env.VERSION_NO_V }}

@@ -193,4 +222,4 @@ jobs:
       - name: Upload release files to Github
         uses: softprops/action-gh-release@v1
         with:
-          files: artifact/*/*
+          files: artifact/*/*itty*

@@ -5,3 +5,5 @@ coverage
 # Ignore Rust projects:
 *.rs
 target
+src/wasm-lib/pkg
+src/wasm-lib/kcl/bindings

README.md (21 changed lines)

@@ -86,3 +86,24 @@ The PR may serve as a place to discuss the human-readable changelog and extra QA
 3. Create a new release and tag pointing to the bump version commit using semantic versioning `v{x}.{y}.{z}`

 4. A new Action kicks in at https://github.com/KittyCAD/modeling-app/actions, uploading artifacts to the release
+
+## Fuzzing the parser
+
+Make sure you install cargo fuzz:
+
+```bash
+$ cargo install cargo-fuzz
+```
+
+```bash
+$ cd src/wasm-lib/kcl
+
+# list the fuzz targets
+$ cargo fuzz list
+
+# run the parser fuzzer
+$ cargo +nightly fuzz run parser
+```
+
+For more information on fuzzing you can check out
+[this guide](https://rust-fuzz.github.io/book/cargo-fuzz.html).
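
A usage note on the fuzzing section added above (not part of the diff): when libFuzzer finds a crashing input, cargo-fuzz saves it under the fuzz crate's `artifacts/parser/` directory, which is why the new `fuzz/.gitignore` introduced later in this diff ignores `artifacts` and `corpus`. A saved input can be replayed by passing its path to the same run command, e.g. `cargo +nightly fuzz run parser <path-to-crash-file>`, and `cargo fuzz tmin parser <path-to-crash-file>` attempts to shrink it to a minimal reproducer. The crash file path here is a placeholder, not a file produced by this change.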

@@ -1,6 +1,6 @@
 {
   "name": "untitled-app",
-  "version": "0.3.1",
+  "version": "0.4.0",
   "private": true,
   "dependencies": {
     "@codemirror/autocomplete": "^6.9.0",
@@ -10,7 +10,7 @@
     "@fortawesome/react-fontawesome": "^0.2.0",
     "@headlessui/react": "^1.7.13",
     "@headlessui/tailwindcss": "^0.2.0",
-    "@kittycad/lib": "^0.0.35",
+    "@kittycad/lib": "^0.0.36",
     "@lezer/javascript": "^1.4.7",
     "@open-rpc/client-js": "^1.8.1",
     "@react-hook/resize-observer": "^1.2.6",

@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "kittycad-modeling",
-    "version": "0.3.1"
+    "version": "0.4.0"
   },
   "tauri": {
     "allowlist": {

src/App.tsx (64 changed lines)

@@ -10,7 +10,7 @@ import { DebugPanel } from './components/DebugPanel'
 import { v4 as uuidv4 } from 'uuid'
 import { asyncParser } from './lang/abstractSyntaxTree'
 import { _executor } from './lang/executor'
-import CodeMirror from '@uiw/react-codemirror'
+import CodeMirror, { Extension } from '@uiw/react-codemirror'
 import { linter, lintGutter } from '@codemirror/lint'
 import { ViewUpdate, EditorView } from '@codemirror/view'
 import {
@@ -54,6 +54,7 @@ import { useGlobalStateContext } from 'hooks/useGlobalStateContext'
 import { onboardingPaths } from 'routes/Onboarding'
 import { LanguageServerClient } from 'editor/lsp'
 import kclLanguage from 'editor/lsp/language'
+import { CSSRuleObject } from 'tailwindcss/types/config'

 export function App() {
   const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
@@ -78,6 +79,7 @@ export function App() {
     setArtifactMap,
     engineCommandManager,
     setEngineCommandManager,
+    highlightRange,
     setHighlightRange,
     setCursor2,
     sourceRangeMap,
@@ -91,7 +93,6 @@ export function App() {
     openPanes,
     setOpenPanes,
     didDragInStream,
-    setDidDragInStream,
     setStreamDimensions,
     streamDimensions,
   } = useStore((s) => ({
@@ -112,6 +113,7 @@ export function App() {
     setArtifactMap: s.setArtifactNSourceRangeMaps,
     engineCommandManager: s.engineCommandManager,
     setEngineCommandManager: s.setEngineCommandManager,
+    highlightRange: s.highlightRange,
     setHighlightRange: s.setHighlightRange,
     isShiftDown: s.isShiftDown,
     setCursor: s.setCursor,
@@ -128,7 +130,6 @@ export function App() {
     openPanes: s.openPanes,
     setOpenPanes: s.setOpenPanes,
     didDragInStream: s.didDragInStream,
-    setDidDragInStream: s.setDidDragInStream,
     setStreamDimensions: s.setStreamDimensions,
     streamDimensions: s.streamDimensions,
   }))
@@ -138,7 +139,7 @@ export function App() {
       context: { token },
     },
     settings: {
-      context: { showDebugPanel, theme, onboardingStatus },
+      context: { showDebugPanel, theme, onboardingStatus, textWrapping },
     },
   } = useGlobalStateContext()

@@ -332,11 +333,14 @@ export function App() {
     const unSubHover = engineCommandManager.subscribeToUnreliable({
       event: 'highlight_set_entity',
       callback: ({ data }) => {
-        if (!data?.entity_id) {
-          setHighlightRange([0, 0])
-        } else {
+        if (data?.entity_id) {
           const sourceRange = sourceRangeMap[data.entity_id]
           setHighlightRange(sourceRange)
+        } else if (
+          !highlightRange ||
+          (highlightRange[0] !== 0 && highlightRange[1] !== 0)
+        ) {
+          setHighlightRange([0, 0])
         }
       },
     })
@@ -385,9 +389,6 @@ export function App() {
     nativeEvent,
   }) => {
     nativeEvent.preventDefault()
-    if (isMouseDownInStream) {
-      setDidDragInStream(true)
-    }

     const { x, y } = getNormalisedCoordinates({
       clientX,
@@ -422,17 +423,6 @@ export function App() {
       }
     }
   }

-  const extraExtensions = useMemo(() => {
-    if (TEST) return []
-    return [
-      lintGutter(),
-      linter((_view) => {
-        return kclErrToDiagnostic(useStore.getState().kclErrors)
-      }),
-      EditorView.lineWrapping,
-    ]
-  }, [])
-
   // So this is a bit weird, we need to initialize the lsp server and client.
   // But the server happens async so we break this into two parts.
   // Below is the client and server promise.
@@ -472,6 +462,25 @@ export function App() {
     return plugin
   }, [lspClient, isLSPServerReady])

+  const editorExtensions = useMemo(() => {
+    const extensions = [lineHighlightField] as Extension[]
+
+    if (kclLSP) extensions.push(kclLSP)
+
+    // These extensions have proven to mess with vitest
+    if (!TEST) {
+      extensions.push(
+        lintGutter(),
+        linter((_view) => {
+          return kclErrToDiagnostic(useStore.getState().kclErrors)
+        })
+      )
+      if (textWrapping === 'On') extensions.push(EditorView.lineWrapping)
+    }
+
+    return extensions
+  }, [kclLSP, textWrapping])
+
   return (
     <div
       className="h-screen overflow-hidden relative flex flex-col cursor-pointer select-none"
@@ -513,7 +522,7 @@ export function App() {
             <CollapsiblePanel
               title="Code"
               icon={faCode}
-              className="open:!mb-2 overflow-x-hidden"
+              className="open:!mb-2"
               open={openPanes.includes('code')}
             >
               <div className="px-2 py-1">
@@ -527,16 +536,13 @@ export function App() {
               </div>
               <div
                 id="code-mirror-override"
-                className="overflow-x-hidden h-full"
+                className="full-height-subtract"
+                style={{ '--height-subtract': '4.25rem' } as CSSRuleObject}
               >
                 <CodeMirror
-                  className="h-full overflow-hidden-x"
+                  className="h-full"
                   value={code}
-                  extensions={
-                    kclLSP
-                      ? [kclLSP, lineHighlightField, ...extraExtensions]
-                      : [lineHighlightField, ...extraExtensions]
-                  }
+                  extensions={editorExtensions}
                   onChange={onChange}
                   onUpdate={onUpdate}
                   theme={editorTheme}
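
The App.tsx change above folds the old `extraExtensions` memo and the LSP/`lineHighlightField` wiring into a single `editorExtensions` memo, so the CodeMirror extension list is rebuilt only when `kclLSP` or the `textWrapping` setting changes. The sketch below illustrates the same composition pattern with plain CodeMirror packages; it is not the app's code, and `kclLSP`, `TEST`, and `textWrapping` are stand-in values.

```typescript
import { EditorState, Extension } from '@codemirror/state'
import { EditorView, lineNumbers } from '@codemirror/view'
import { lintGutter } from '@codemirror/lint'

// Stand-ins for values the real component derives from its store and LSP client.
const kclLSP: Extension | null = null // language/LSP extension once the server is ready
const TEST = false // vitest guard, mirroring the diff's `if (!TEST)` check
const textWrapping: 'On' | 'Off' = 'On'

// Build the extension list conditionally, the way the new editorExtensions memo does.
function buildEditorExtensions(): Extension[] {
  const extensions: Extension[] = [lineNumbers()] // stand-in for lineHighlightField
  if (kclLSP) extensions.push(kclLSP)
  if (!TEST) {
    extensions.push(lintGutter())
    if (textWrapping === 'On') extensions.push(EditorView.lineWrapping)
  }
  return extensions
}

// Usage: hand the composed list to an editor state.
const state = EditorState.create({
  doc: 'const part = startSketchAt([0, 0])',
  extensions: buildEditorExtensions(),
})
console.log(state.doc.toString())
```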

@@ -1,5 +1,5 @@
 .panel {
-  @apply relative overflow-auto z-0;
+  @apply relative z-0;
   @apply bg-chalkboard-10/70 backdrop-blur-sm;
 }


@@ -196,7 +196,7 @@ const CommandBar = () => {
         setCommandBarOpen(false)
         clearState()
       }}
-      className="fixed inset-0 overflow-y-auto p-4 pt-[25vh]"
+      className="fixed inset-0 z-40 overflow-y-auto p-4 pt-[25vh]"
     >
       <Transition.Child
         enter="duration-100 ease-out"
@@ -207,7 +207,7 @@ const CommandBar = () => {
         leaveTo="opacity-0"
         as={Fragment}
       >
-        <Dialog.Overlay className="fixed z-40 inset-0 bg-chalkboard-10/70 dark:bg-chalkboard-110/50" />
+        <Dialog.Overlay className="fixed inset-0 bg-chalkboard-10/70 dark:bg-chalkboard-110/50" />
       </Transition.Child>
       <Transition.Child
         enter="duration-100 ease-out"
@@ -221,7 +221,7 @@ const CommandBar = () => {
         <Combobox
           value={selectedCommand}
           onChange={handleCommandSelection}
-          className="rounded relative mx-auto z-40 p-2 bg-chalkboard-10 dark:bg-chalkboard-100 border dark:border-chalkboard-70 max-w-xl w-full shadow-lg"
+          className="rounded relative mx-auto p-2 bg-chalkboard-10 dark:bg-chalkboard-100 border dark:border-chalkboard-70 max-w-xl w-full shadow-lg"
           as="div"
         >
           <div className="flex gap-2 items-center">

@@ -12,6 +12,7 @@ import Loading from './Loading'

 export const Stream = ({ className = '' }) => {
   const [isLoading, setIsLoading] = useState(true)
+  const [clickCoords, setClickCoords] = useState<{ x: number; y: number }>()
   const videoRef = useRef<HTMLVideoElement>(null)
   const {
     mediaStream,
@@ -71,6 +72,7 @@ export const Stream = ({ className = '' }) => {
     })

     setIsMouseDownInStream(true)
+    setClickCoords({ x, y })
   }

   const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
@@ -124,6 +126,19 @@ export const Stream = ({ className = '' }) => {
       })
     }
     setDidDragInStream(false)
+    setClickCoords(undefined)
+  }
+
+  const handleMouseMove: MouseEventHandler<HTMLVideoElement> = (e) => {
+    if (!clickCoords) return
+
+    const delta =
+      ((clickCoords.x - e.clientX) ** 2 + (clickCoords.y - e.clientY) ** 2) **
+      0.5
+
+    if (delta > 5 && !didDragInStream) {
+      setDidDragInStream(true)
+    }
   }

   return (
@@ -139,6 +154,7 @@ export const Stream = ({ className = '' }) => {
       onContextMenuCapture={(e) => e.preventDefault()}
       onWheel={handleScroll}
       onPlay={() => setIsLoading(false)}
+      onMouseMoveCapture={handleMouseMove}
       className="w-full h-full"
     />
     {isLoading && (
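
The Stream.tsx change above starts treating a mouse interaction as a drag only after the pointer has moved more than 5 px from where the button went down, using the Euclidean distance between the press coordinates and the current mouse position. A small self-contained sketch of that threshold check (the names here are illustrative, not the component's API):

```typescript
type Point = { x: number; y: number }

// Returns true once the pointer has moved farther than `threshold` pixels
// from where the button was pressed.
function isDrag(downAt: Point | undefined, current: Point, threshold = 5): boolean {
  if (!downAt) return false
  const dx = downAt.x - current.x
  const dy = downAt.y - current.y
  return Math.hypot(dx, dy) > threshold
}

// Usage: a 3-4-5 triangle is exactly 5 px away, so it does not count as a drag yet.
console.log(isDrag({ x: 0, y: 0 }, { x: 3, y: 4 })) // false (distance = 5)
console.log(isDrag({ x: 0, y: 0 }, { x: 4, y: 4 })) // true (distance ~ 5.66)
```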

@@ -208,7 +208,13 @@ export class LanguageServerPlugin implements PluginValue {
         filterText: filterText ?? label,
       }
       if (documentation) {
-        completion.info = formatContents(documentation)
+        completion.info = () => {
+          const htmlString = formatContents(documentation)
+          const htmlNode = document.createElement('div')
+          htmlNode.style.display = 'contents'
+          htmlNode.innerHTML = htmlString
+          return { dom: htmlNode }
+        }
       }

       return completion

@@ -82,11 +82,22 @@ code {
     monospace;
 }

+.full-height-subtract {
+  --height-subtract: 2.25rem;
+  height: 100%;
+  max-height: calc(100% - var(--height-subtract));
+}
+
 #code-mirror-override .cm-editor {
-  @apply bg-transparent;
+  @apply h-full bg-transparent;
 }

 #code-mirror-override .cm-scroller {
+  @apply h-full;
+}
+
+#code-mirror-override .cm-scroller::-webkit-scrollbar {
+  @apply h-0;
 }

 #code-mirror-override .cm-activeLine,
@@ -137,14 +148,39 @@ code {
 }

 #code-mirror-override .cm-tooltip {
-  font-size: 80%;
+  @apply text-xs shadow-md;
+  @apply bg-chalkboard-10 text-chalkboard-80;
+  @apply rounded-sm border-solid border border-chalkboard-40/30 border-l-liquid-10;
+}
+
+.dark #code-mirror-override .cm-tooltip {
+  @apply bg-chalkboard-110 text-chalkboard-40;
+  @apply border-chalkboard-70/20 border-l-liquid-70;
 }

 #code-mirror-override .cm-tooltip-hover {
+  @apply py-1 px-2 w-max max-w-md;
 }

-#code-mirror-override .cm-tooltip-hover .documentation {
-  padding: 5;
+#code-mirror-override .cm-completionInfo {
+  @apply px-4 rounded-l-none;
+  @apply bg-chalkboard-10 text-liquid-90;
+  @apply border-liquid-40/30;
+}
+
+.dark #code-mirror-override .cm-completionInfo {
+  @apply bg-liquid-120 text-liquid-50;
+  @apply border-liquid-90/60;
+}
+
+#code-mirror-override .cm-tooltip-autocomplete li {
+  @apply px-2 py-1;
+}
+#code-mirror-override .cm-tooltip-autocomplete li[aria-selected='true'] {
+  @apply bg-liquid-10 text-liquid-110;
+}
+.dark #code-mirror-override .cm-tooltip-autocomplete li[aria-selected='true'] {
+  @apply bg-liquid-100 text-liquid-20;
 }

 #code-mirror-override .cm-content {

@@ -1564,7 +1564,7 @@ const key = 'c'`
       start: code.indexOf('\n// this is a comment'),
       end: code.indexOf('const key'),
       value: {
-        type: 'block',
+        type: 'blockComment',
         value: 'this is a comment',
       },
     }
@@ -1602,7 +1602,7 @@ const key = 'c'`
       start: 106,
       end: 166,
       value: {
-        type: 'block',
+        type: 'blockComment',
         value: 'this is\n a comment\n spanning a few lines',
       },
     })
@@ -1625,7 +1625,7 @@ const key = 'c'`
       start: 125,
       end: 141,
       value: {
-        type: 'block',
+        type: 'blockComment',
         value: 'a comment',
       },
     })

@@ -1426,7 +1426,6 @@ export function transformAstSketchLines({
   selectionRanges.codeBasedSelections.forEach(({ range }, index) => {
     const callBack = transformInfos?.[index].createNode
     const transformTo = transformInfos?.[index].tooltip
-    console.log('transformTo', transformInfos)
     if (!callBack || !transformTo) throw new Error('no callback helper')

     const getNode = getNodeFromPathCurry(

@@ -62,6 +62,17 @@ export const settingsCommandBarMeta: CommandBarMeta = {
       },
     ],
   },
+  'Set Text Wrapping': {
+    displayValue: (args: string[]) => 'Set whether text in the editor wraps',
+    args: [
+      {
+        name: 'textWrapping',
+        type: 'select',
+        defaultValue: 'textWrapping',
+        options: [{ name: 'On' }, { name: 'Off' }],
+      },
+    ],
+  },
   'Set Onboarding Status': {
     hide: 'both',
   },
@@ -78,6 +89,7 @@ export const settingsMachine = createMachine(
       unitSystem: UnitSystem.Imperial,
       baseUnit: 'in' as BaseUnit,
       defaultDirectory: '',
+      textWrapping: 'On' as 'On' | 'Off',
       showDebugPanel: false,
       onboardingStatus: '',
     },
@@ -142,6 +154,17 @@ export const settingsMachine = createMachine(
           target: 'idle',
           internal: true,
         },
+        'Set Text Wrapping': {
+          actions: [
+            assign({
+              textWrapping: (_, event) => event.data.textWrapping,
+            }),
+            'persistSettings',
+            'toastSuccess',
+          ],
+          target: 'idle',
+          internal: true,
+        },
         'Toggle Debug Panel': {
           actions: [
             assign({
@@ -182,6 +205,7 @@ export const settingsMachine = createMachine(
         data: { unitSystem: UnitSystem }
       }
       | { type: 'Set Base Unit'; data: { baseUnit: BaseUnit } }
+      | { type: 'Set Text Wrapping'; data: { textWrapping: 'On' | 'Off' } }
       | { type: 'Set Onboarding Status'; data: { onboardingStatus: string } }
       | { type: 'Toggle Debug Panel' },
   },

@@ -18,6 +18,7 @@ export interface Typegen0 {
     | 'Set Default Directory'
     | 'Set Default Project Name'
     | 'Set Onboarding Status'
+    | 'Set Text Wrapping'
     | 'Set Theme'
     | 'Set Unit System'
     | 'Toggle Debug Panel'
@@ -26,6 +27,7 @@ export interface Typegen0 {
     | 'Set Default Directory'
     | 'Set Default Project Name'
     | 'Set Onboarding Status'
+    | 'Set Text Wrapping'
     | 'Set Theme'
     | 'Set Unit System'
     | 'Toggle Debug Panel'
@@ -34,6 +36,7 @@ export interface Typegen0 {
     | 'Set Base Unit'
     | 'Set Default Directory'
     | 'Set Default Project Name'
+    | 'Set Text Wrapping'
     | 'Set Theme'
     | 'Set Unit System'
     | 'Toggle Debug Panel'
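
The settings changes above thread a new 'Set Text Wrapping' event through the command bar metadata, the XState settings machine, and its generated typegen file. The sketch below shows the same event-plus-assign shape in a minimal standalone machine; it is an illustration under XState v4 conventions, not the app's full settingsMachine (which also runs the 'persistSettings' and 'toastSuccess' actions).

```typescript
import { createMachine, assign, interpret } from 'xstate'

type TextWrapping = 'On' | 'Off'
type SettingsEvent = { type: 'Set Text Wrapping'; data: { textWrapping: TextWrapping } }

const wrappingMachine = createMachine({
  predictableActionArguments: true,
  schema: {
    context: {} as { textWrapping: TextWrapping },
    events: {} as SettingsEvent,
  },
  id: 'settings-sketch',
  initial: 'idle',
  context: { textWrapping: 'On' },
  states: {
    idle: {
      on: {
        // Self-transition that copies the payload into context, as in the diff.
        'Set Text Wrapping': {
          actions: assign({ textWrapping: (_ctx, event) => event.data.textWrapping }),
          target: 'idle',
          internal: true,
        },
      },
    },
  },
})

// Usage: send the event and read the updated context.
const service = interpret(wrappingMachine).start()
service.send({ type: 'Set Text Wrapping', data: { textWrapping: 'Off' } })
console.log(service.state.context.textWrapping) // 'Off'
```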

src/wasm-lib/Cargo.lock (generated, 2 changed lines)

@@ -1094,7 +1094,7 @@ dependencies = [

 [[package]]
 name = "kcl-lib"
-version = "0.1.20"
+version = "0.1.24"
 dependencies = [
  "anyhow",
  "bson",

@@ -1,7 +1,7 @@
 [package]
 name = "kcl-lib"
 description = "KittyCAD Language"
-version = "0.1.20"
+version = "0.1.24"
 edition = "2021"
 license = "MIT"

@@ -11,7 +11,7 @@ license = "MIT"
 anyhow = { version = "1.0.75", features = ["backtrace"] }
 clap = { version = "4.4.2", features = ["cargo", "derive", "env", "unicode"] }
 dashmap = "5.5.3"
-derive-docs = { version = "0.1.1" }
+derive-docs = { version = "0.1.3" }
 #derive-docs = { path = "../derive-docs" }
 kittycad = { version = "0.2.23", default-features = false, features = ["js"] }
 lazy_static = "1.4.0"

src/wasm-lib/kcl/fuzz/.gitignore (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
+target
+corpus
+artifacts
+coverage

src/wasm-lib/kcl/fuzz/Cargo.lock (generated, new file, 2218 lines)
File diff suppressed because it is too large.

src/wasm-lib/kcl/fuzz/Cargo.toml (new file, 27 lines)

@@ -0,0 +1,27 @@
+[package]
+name = "kcl-lib-fuzz"
+version = "0.0.0"
+publish = false
+edition = "2021"
+
+[package.metadata]
+cargo-fuzz = true
+
+[dependencies]
+libfuzzer-sys = "0.4"
+
+[dependencies.kcl-lib]
+path = ".."
+
+# Prevent this from interfering with workspaces
+[workspace]
+members = ["."]
+
+[profile.release]
+debug = 1
+
+[[bin]]
+name = "parser"
+path = "fuzz_targets/parser.rs"
+test = false
+doc = false

src/wasm-lib/kcl/fuzz/fuzz_targets/parser.rs (new file, 14 lines)

@@ -0,0 +1,14 @@
+#![no_main]
+#[macro_use]
+extern crate libfuzzer_sys;
+extern crate kcl_lib;
+
+fuzz_target!(|data: &[u8]| {
+    if let Ok(s) = std::str::from_utf8(data) {
+        let tokens = kcl_lib::tokeniser::lexer(s);
+        let parser = kcl_lib::parser::Parser::new(tokens);
+        if let Ok(_) = parser.ast() {
+            println!("OK");
+        }
+    }
+});
});
|
@ -27,12 +27,16 @@ pub struct Program {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Program {
|
impl Program {
|
||||||
pub fn recast(&self, indentation: &str, is_with_block: bool) -> String {
|
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
||||||
self.body
|
let indentation = options.get_indentation(indentation_level);
|
||||||
|
let result = self
|
||||||
|
.body
|
||||||
.iter()
|
.iter()
|
||||||
.map(|statement| match statement.clone() {
|
.map(|statement| match statement.clone() {
|
||||||
BodyItem::ExpressionStatement(expression_statement) => {
|
BodyItem::ExpressionStatement(expression_statement) => {
|
||||||
expression_statement.expression.recast(indentation, false)
|
expression_statement
|
||||||
|
.expression
|
||||||
|
.recast(options, indentation_level, false)
|
||||||
}
|
}
|
||||||
BodyItem::VariableDeclaration(variable_declaration) => variable_declaration
|
BodyItem::VariableDeclaration(variable_declaration) => variable_declaration
|
||||||
.declarations
|
.declarations
|
||||||
@ -43,56 +47,44 @@ impl Program {
|
|||||||
indentation,
|
indentation,
|
||||||
variable_declaration.kind,
|
variable_declaration.kind,
|
||||||
declaration.id.name,
|
declaration.id.name,
|
||||||
declaration.init.recast("", false)
|
declaration.init.recast(options, 0, false)
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
.collect::<String>(),
|
.collect::<String>(),
|
||||||
BodyItem::ReturnStatement(return_statement) => {
|
BodyItem::ReturnStatement(return_statement) => {
|
||||||
format!("{}return {}", indentation, return_statement.argument.recast("", false))
|
format!(
|
||||||
|
"{}return {}",
|
||||||
|
indentation,
|
||||||
|
return_statement.argument.recast(options, 0, false)
|
||||||
|
)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(index, recast_str)| {
|
.map(|(index, recast_str)| {
|
||||||
let is_legit_custom_whitespace_or_comment = |s: String| s != " " && s != "\n" && s != " " && s != "\t";
|
let start_string = if index == 0 {
|
||||||
|
// We need to indent.
|
||||||
// determine the value of startString
|
|
||||||
let last_white_space_or_comment = if index > 0 {
|
|
||||||
let tmp = if let Some(non_code_node) = self.non_code_meta.none_code_nodes.get(&(index - 1)) {
|
|
||||||
non_code_node.format(indentation)
|
|
||||||
} else {
|
|
||||||
" ".to_string()
|
|
||||||
};
|
|
||||||
tmp
|
|
||||||
} else {
|
|
||||||
" ".to_string()
|
|
||||||
};
|
|
||||||
// indentation of this line will be covered by the previous if we're using a custom whitespace or comment
|
|
||||||
let mut start_string = if is_legit_custom_whitespace_or_comment(last_white_space_or_comment) {
|
|
||||||
String::new()
|
|
||||||
} else {
|
|
||||||
indentation.to_owned()
|
|
||||||
};
|
|
||||||
if index == 0 {
|
|
||||||
if let Some(start) = self.non_code_meta.start.clone() {
|
if let Some(start) = self.non_code_meta.start.clone() {
|
||||||
start_string = start.format(indentation);
|
start.format(&indentation)
|
||||||
} else {
|
} else {
|
||||||
start_string = indentation.to_owned();
|
indentation.to_string()
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
// Do nothing, we already applied the indentation elsewhere.
|
||||||
|
String::new()
|
||||||
|
};
|
||||||
|
|
||||||
// determine the value of endString
|
// determine the value of the end string
|
||||||
let maybe_line_break: String = if index == self.body.len() - 1 && !is_with_block {
|
// basically if we are inside a nested function we want to end with a new line
|
||||||
|
let maybe_line_break: String = if index == self.body.len() - 1 && indentation_level == 0 {
|
||||||
String::new()
|
String::new()
|
||||||
} else {
|
} else {
|
||||||
"\n".to_string()
|
"\n".to_string()
|
||||||
};
|
};
|
||||||
let mut custom_white_space_or_comment = match self.non_code_meta.none_code_nodes.get(&index) {
|
|
||||||
Some(custom_white_space_or_comment) => custom_white_space_or_comment.format(indentation),
|
let custom_white_space_or_comment = match self.non_code_meta.none_code_nodes.get(&index) {
|
||||||
|
Some(custom_white_space_or_comment) => custom_white_space_or_comment.format(&indentation),
|
||||||
None => String::new(),
|
None => String::new(),
|
||||||
};
|
};
|
||||||
if !is_legit_custom_whitespace_or_comment(custom_white_space_or_comment.clone()) {
|
|
||||||
custom_white_space_or_comment = String::new();
|
|
||||||
}
|
|
||||||
let end_string = if custom_white_space_or_comment.is_empty() {
|
let end_string = if custom_white_space_or_comment.is_empty() {
|
||||||
maybe_line_break
|
maybe_line_break
|
||||||
} else {
|
} else {
|
||||||
@ -103,7 +95,14 @@ impl Program {
|
|||||||
})
|
})
|
||||||
.collect::<String>()
|
.collect::<String>()
|
||||||
.trim()
|
.trim()
|
||||||
.to_string()
|
.to_string();
|
||||||
|
|
||||||
|
// Insert a final new line if the user wants it.
|
||||||
|
if options.insert_final_newline {
|
||||||
|
format!("{}\n", result)
|
||||||
|
} else {
|
||||||
|
result
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the body item that includes the given character position.
|
/// Returns the body item that includes the given character position.
|
||||||
@ -118,6 +117,18 @@ impl Program {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns the body item that includes the given character position.
|
||||||
|
pub fn get_mut_body_item_for_position(&mut self, pos: usize) -> Option<&mut BodyItem> {
|
||||||
|
for item in &mut self.body {
|
||||||
|
let source_range: SourceRange = item.clone().into();
|
||||||
|
if source_range.contains(pos) {
|
||||||
|
return Some(item);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns a value that includes the given character position.
|
/// Returns a value that includes the given character position.
|
||||||
/// This is a bit more recursive than `get_body_item_for_position`.
|
/// This is a bit more recursive than `get_body_item_for_position`.
|
||||||
pub fn get_value_for_position(&self, pos: usize) -> Option<&Value> {
|
pub fn get_value_for_position(&self, pos: usize) -> Option<&Value> {
|
||||||
@ -150,6 +161,82 @@ impl Program {
|
|||||||
|
|
||||||
symbols
|
symbols
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rename the variable declaration at the given position.
|
||||||
|
pub fn rename_symbol(&mut self, new_name: &str, pos: usize) {
|
||||||
|
// The position must be within the variable declaration.
|
||||||
|
let mut old_name = None;
|
||||||
|
for item in &mut self.body {
|
||||||
|
match item {
|
||||||
|
BodyItem::ExpressionStatement(_expression_statement) => {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||||
|
if let Some(var_old_name) = variable_declaration.rename_symbol(new_name, pos) {
|
||||||
|
old_name = Some(var_old_name);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
BodyItem::ReturnStatement(_return_statement) => continue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(old_name) = old_name {
|
||||||
|
// Now rename all the identifiers in the rest of the program.
|
||||||
|
self.rename_identifiers(&old_name, new_name);
|
||||||
|
} else {
|
||||||
|
// Okay so this was not a top level variable declaration.
|
||||||
|
// But it might be a variable declaration inside a function or function params.
|
||||||
|
// So we need to check that.
|
||||||
|
let Some(ref mut item) = self.get_mut_body_item_for_position(pos) else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Recurse over the item.
|
||||||
|
let mut value = match item {
|
||||||
|
BodyItem::ExpressionStatement(ref mut expression_statement) => {
|
||||||
|
Some(&mut expression_statement.expression)
|
||||||
|
}
|
||||||
|
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||||
|
variable_declaration.get_mut_value_for_position(pos)
|
||||||
|
}
|
||||||
|
BodyItem::ReturnStatement(ref mut return_statement) => Some(&mut return_statement.argument),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check if we have a function expression.
|
||||||
|
if let Some(Value::FunctionExpression(ref mut function_expression)) = &mut value {
|
||||||
|
// Check if the params to the function expression contain the position.
|
||||||
|
for param in &mut function_expression.params {
|
||||||
|
let param_source_range: SourceRange = param.clone().into();
|
||||||
|
if param_source_range.contains(pos) {
|
||||||
|
let old_name = param.name.clone();
|
||||||
|
// Rename the param.
|
||||||
|
param.rename(&old_name, new_name);
|
||||||
|
// Now rename all the identifiers in the rest of the program.
|
||||||
|
function_expression.body.rename_identifiers(&old_name, new_name);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Rename all identifiers that have the old name to the new given name.
|
||||||
|
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||||
|
for item in &mut self.body {
|
||||||
|
match item {
|
||||||
|
BodyItem::ExpressionStatement(ref mut expression_statement) => {
|
||||||
|
expression_statement.expression.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
|
BodyItem::VariableDeclaration(ref mut variable_declaration) => {
|
||||||
|
variable_declaration.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
|
BodyItem::ReturnStatement(ref mut return_statement) => {
|
||||||
|
return_statement.argument.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait ValueMeta {
|
pub trait ValueMeta {
@@ -249,19 +336,18 @@ pub enum Value {
 }

 impl Value {
-    fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
-        let indentation = indentation.to_string() + if is_in_pipe_expression { "  " } else { "" };
+    fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
         match &self {
-            Value::BinaryExpression(bin_exp) => bin_exp.recast(),
+            Value::BinaryExpression(bin_exp) => bin_exp.recast(options),
-            Value::ArrayExpression(array_exp) => array_exp.recast(&indentation, is_in_pipe_expression),
+            Value::ArrayExpression(array_exp) => array_exp.recast(options, indentation_level, is_in_pipe),
-            Value::ObjectExpression(ref obj_exp) => obj_exp.recast(&indentation, is_in_pipe_expression),
+            Value::ObjectExpression(ref obj_exp) => obj_exp.recast(options, indentation_level, is_in_pipe),
             Value::MemberExpression(mem_exp) => mem_exp.recast(),
             Value::Literal(literal) => literal.recast(),
-            Value::FunctionExpression(func_exp) => func_exp.recast(&indentation),
+            Value::FunctionExpression(func_exp) => func_exp.recast(options, indentation_level),
-            Value::CallExpression(call_exp) => call_exp.recast(&indentation, is_in_pipe_expression),
+            Value::CallExpression(call_exp) => call_exp.recast(options, indentation_level, is_in_pipe),
             Value::Identifier(ident) => ident.name.to_string(),
-            Value::PipeExpression(pipe_exp) => pipe_exp.recast(&indentation),
+            Value::PipeExpression(pipe_exp) => pipe_exp.recast(options, indentation_level),
-            Value::UnaryExpression(unary_exp) => unary_exp.recast(),
+            Value::UnaryExpression(unary_exp) => unary_exp.recast(options),
             Value::PipeSubstitution(_) => crate::parser::PIPE_SUBSTITUTION_OPERATOR.to_string(),
         }
     }
@@ -317,6 +403,29 @@ impl Value {
             Value::UnaryExpression(unary_expression) => unary_expression.get_hover_value_for_position(pos, code),
         }
     }
+
+    /// Rename all identifiers that have the old name to the new given name.
+    fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
+        match self {
+            Value::Literal(_literal) => {}
+            Value::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
+            Value::BinaryExpression(ref mut binary_expression) => {
+                binary_expression.rename_identifiers(old_name, new_name)
+            }
+            Value::FunctionExpression(_function_identifier) => {}
+            Value::CallExpression(ref mut call_expression) => call_expression.rename_identifiers(old_name, new_name),
+            Value::PipeExpression(ref mut pipe_expression) => pipe_expression.rename_identifiers(old_name, new_name),
+            Value::PipeSubstitution(_) => {}
+            Value::ArrayExpression(ref mut array_expression) => array_expression.rename_identifiers(old_name, new_name),
+            Value::ObjectExpression(ref mut object_expression) => {
+                object_expression.rename_identifiers(old_name, new_name)
+            }
+            Value::MemberExpression(ref mut member_expression) => {
+                member_expression.rename_identifiers(old_name, new_name)
+            }
+            Value::UnaryExpression(ref mut unary_expression) => unary_expression.rename_identifiers(old_name, new_name),
+        }
+    }
 }

 impl From<Value> for crate::executor::SourceRange {
@@ -355,13 +464,13 @@ impl From<&BinaryPart> for crate::executor::SourceRange {
 }

 impl BinaryPart {
-    fn recast(&self, indentation: &str) -> String {
+    fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
         match &self {
             BinaryPart::Literal(literal) => literal.recast(),
             BinaryPart::Identifier(identifier) => identifier.name.to_string(),
-            BinaryPart::BinaryExpression(binary_expression) => binary_expression.recast(),
+            BinaryPart::BinaryExpression(binary_expression) => binary_expression.recast(options),
-            BinaryPart::CallExpression(call_expression) => call_expression.recast(indentation, false),
+            BinaryPart::CallExpression(call_expression) => call_expression.recast(options, indentation_level, false),
-            BinaryPart::UnaryExpression(unary_expression) => unary_expression.recast(),
+            BinaryPart::UnaryExpression(unary_expression) => unary_expression.recast(options),
         }
     }

@@ -422,6 +531,23 @@ impl BinaryPart {
             BinaryPart::UnaryExpression(unary_expression) => unary_expression.get_hover_value_for_position(pos, code),
         }
     }
+
+    /// Rename all identifiers that have the old name to the new given name.
+    fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
+        match self {
+            BinaryPart::Literal(_literal) => {}
+            BinaryPart::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
+            BinaryPart::BinaryExpression(ref mut binary_expression) => {
+                binary_expression.rename_identifiers(old_name, new_name)
+            }
+            BinaryPart::CallExpression(ref mut call_expression) => {
+                call_expression.rename_identifiers(old_name, new_name)
+            }
+            BinaryPart::UnaryExpression(ref mut unary_expression) => {
+                unary_expression.rename_identifiers(old_name, new_name)
+            }
+        }
+    }
 }

 #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
@@ -436,17 +562,17 @@ pub struct NoneCodeNode {
 impl NoneCodeNode {
     pub fn value(&self) -> String {
         match &self.value {
-            NoneCodeValue::Inline { value } => value.clone(),
+            NoneCodeValue::InlineComment { value } => value.clone(),
-            NoneCodeValue::Block { value } => value.clone(),
+            NoneCodeValue::BlockComment { value } => value.clone(),
-            NoneCodeValue::NewLineBlock { value } => value.clone(),
+            NoneCodeValue::NewLineBlockComment { value } => value.clone(),
             NoneCodeValue::NewLine => "\n\n".to_string(),
         }
     }

     pub fn format(&self, indentation: &str) -> String {
         match &self.value {
-            NoneCodeValue::Inline { value } => format!(" // {}\n", value),
+            NoneCodeValue::InlineComment { value } => format!(" // {}\n", value),
-            NoneCodeValue::Block { value } => {
+            NoneCodeValue::BlockComment { value } => {
                 let add_start_new_line = if self.start == 0 { "" } else { "\n" };
                 if value.contains('\n') {
                     format!("{}{}/* {} */\n", add_start_new_line, indentation, value)
@@ -454,7 +580,7 @@ impl NoneCodeNode {
                     format!("{}{}// {}\n", add_start_new_line, indentation, value)
                 }
             }
-            NoneCodeValue::NewLineBlock { value } => {
+            NoneCodeValue::NewLineBlockComment { value } => {
                 let add_start_new_line = if self.start == 0 { "" } else { "\n\n" };
                 if value.contains('\n') {
                     format!("{}{}/* {} */\n", add_start_new_line, indentation, value)
@@ -471,9 +597,29 @@ impl NoneCodeNode {
 #[ts(export)]
 #[serde(tag = "type", rename_all = "camelCase")]
 pub enum NoneCodeValue {
-    Inline { value: String },
-    Block { value: String },
-    NewLineBlock { value: String },
+    /// An inline comment.
+    /// An example of this is the following: `1 + 1 // This is an inline comment`.
+    InlineComment {
+        value: String,
+    },
+    /// A block comment.
+    /// An example of this is the following:
+    /// ```python,no_run
+    /// /* This is a
+    ///   block comment */
+    /// 1 + 1
+    /// ```
+    /// Now this is important. The block comment is attached to the next line.
+    /// This is always the case. Also the block comment doesnt have a new line above it.
+    /// If it did it would be a `NewLineBlockComment`.
+    BlockComment {
+        value: String,
+    },
+    /// A block comment that has a new line above it.
+    /// The user explicitly added a new line above the block comment.
+    NewLineBlockComment {
+        value: String,
+    },
     // A new line like `\n\n` NOT a new line like `\n`.
     // This is also not a comment.
     NewLine,
@@ -539,13 +685,13 @@ pub struct CallExpression {
 impl_value_meta!(CallExpression);

 impl CallExpression {
-    fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
+    fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
         format!(
             "{}({})",
             self.callee.name,
             self.arguments
                 .iter()
-                .map(|arg| arg.recast(indentation, is_in_pipe_expression))
+                .map(|arg| arg.recast(options, indentation_level, is_in_pipe))
                 .collect::<Vec<String>>()
                 .join(", ")
         )
@@ -671,6 +817,15 @@ impl CallExpression {

         None
     }
+
+    /// Rename all identifiers that have the old name to the new given name.
+    fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
+        self.callee.rename(old_name, new_name);
+
+        for arg in &mut self.arguments {
+            arg.rename_identifiers(old_name, new_name);
+        }
+    }
 }

 /// A function declaration.
@@ -723,6 +878,50 @@ impl VariableDeclaration {
         None
     }
+
+    /// Returns a value that includes the given character position.
+    pub fn get_mut_value_for_position(&mut self, pos: usize) -> Option<&mut Value> {
+        for declaration in &mut self.declarations {
+            let source_range: SourceRange = declaration.clone().into();
+            if source_range.contains(pos) {
+                return Some(&mut declaration.init);
+            }
+        }
+
+        None
+    }
+
+    /// Rename the variable declaration at the given position.
+    /// This returns the old name of the variable, if it found one.
+    pub fn rename_symbol(&mut self, new_name: &str, pos: usize) -> Option<String> {
+        // The position must be within the variable declaration.
+        let source_range: SourceRange = self.clone().into();
+        if !source_range.contains(pos) {
+            return None;
+        }
+
+        for declaration in &mut self.declarations {
+            let declaration_source_range: SourceRange = declaration.id.clone().into();
+            if declaration_source_range.contains(pos) {
+                let old_name = declaration.id.name.clone();
+                declaration.id.name = new_name.to_string();
+                return Some(old_name);
+            }
+        }
+
+        None
+    }
+
+    pub fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
+        for declaration in &mut self.declarations {
+            // Skip the init for the variable with the new name since it is the one we are renaming.
+            if declaration.id.name == new_name {
+                continue;
+            }
+
+            declaration.init.rename_identifiers(old_name, new_name);
+        }
+    }

     pub fn get_lsp_symbols(&self, code: &str) -> Vec<DocumentSymbol> {
         let mut symbols = vec![];

@@ -839,7 +1038,9 @@ impl VariableKind {
 pub struct VariableDeclarator {
     pub start: usize,
     pub end: usize,
+    /// The identifier of the variable.
     pub id: Identifier,
+    /// The value of the variable.
     pub init: Value,
 }

@@ -901,6 +1102,15 @@ pub struct Identifier {

 impl_value_meta!(Identifier);

+impl Identifier {
+    /// Rename all identifiers that have the old name to the new given name.
+    fn rename(&mut self, old_name: &str, new_name: &str) {
+        if self.name == old_name {
+            self.name = new_name.to_string();
+        }
+    }
+}
+
 #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
 #[ts(export)]
 #[serde(tag = "type")]
@@ -923,27 +1133,35 @@ pub struct ArrayExpression {
 impl_value_meta!(ArrayExpression);

 impl ArrayExpression {
-    fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
+    fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
         let flat_recast = format!(
             "[{}]",
             self.elements
                 .iter()
-                .map(|el| el.recast("", false))
+                .map(|el| el.recast(options, 0, false))
                 .collect::<Vec<String>>()
                 .join(", ")
         );
         let max_array_length = 40;
         if flat_recast.len() > max_array_length {
-            let indentation = indentation.to_string() + "    ";
+            let inner_indentation = if is_in_pipe {
+                options.get_indentation_offset_pipe(indentation_level + 1)
+            } else {
+                options.get_indentation(indentation_level + 1)
+            };
             format!(
                 "[\n{}{}\n{}]",
-                indentation,
+                inner_indentation,
                 self.elements
                     .iter()
-                    .map(|el| el.recast(&indentation, false))
+                    .map(|el| el.recast(options, indentation_level, false))
                     .collect::<Vec<String>>()
-                    .join(format!(",\n{}", indentation).as_str()),
+                    .join(format!(",\n{}", inner_indentation).as_str()),
-                if is_in_pipe_expression { "  " } else { "" }
+                if is_in_pipe {
+                    options.get_indentation_offset_pipe(indentation_level)
+                } else {
+                    options.get_indentation(indentation_level)
+                },
             )
         } else {
             flat_recast
@@ -1019,6 +1237,13 @@ impl ArrayExpression {
             }],
         })
     }
+
+    /// Rename all identifiers that have the old name to the new given name.
+    fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
+        for element in &mut self.elements {
+            element.rename_identifiers(old_name, new_name);
+        }
+    }
 }

 #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
@@ -1031,27 +1256,35 @@ pub struct ObjectExpression {
 }

 impl ObjectExpression {
-    fn recast(&self, indentation: &str, is_in_pipe_expression: bool) -> String {
+    fn recast(&self, options: &FormatOptions, indentation_level: usize, is_in_pipe: bool) -> String {
         let flat_recast = format!(
             "{{ {} }}",
             self.properties
                 .iter()
-                .map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast("", false)) })
+                .map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast(options, 0, false)) })
                 .collect::<Vec<String>>()
                 .join(", ")
         );
         let max_array_length = 40;
         if flat_recast.len() > max_array_length {
|
if flat_recast.len() > max_array_length {
|
||||||
let indentation = indentation.to_owned() + " ";
|
let inner_indentation = if is_in_pipe {
|
||||||
|
options.get_indentation_offset_pipe(indentation_level + 1)
|
||||||
|
} else {
|
||||||
|
options.get_indentation(indentation_level + 1)
|
||||||
|
};
|
||||||
format!(
|
format!(
|
||||||
"{{\n{}{}\n{}}}",
|
"{{\n{}{}\n{}}}",
|
||||||
indentation,
|
inner_indentation,
|
||||||
self.properties
|
self.properties
|
||||||
.iter()
|
.iter()
|
||||||
.map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast("", is_in_pipe_expression)) })
|
.map(|prop| { format!("{}: {}", prop.key.name, prop.value.recast(options, 0, false)) })
|
||||||
.collect::<Vec<String>>()
|
.collect::<Vec<String>>()
|
||||||
.join(format!(",\n{}", indentation).as_str()),
|
.join(format!(",\n{}", inner_indentation).as_str()),
|
||||||
if is_in_pipe_expression { " " } else { "" }
|
if is_in_pipe {
|
||||||
|
options.get_indentation_offset_pipe(indentation_level)
|
||||||
|
} else {
|
||||||
|
options.get_indentation(indentation_level)
|
||||||
|
},
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
flat_recast
|
flat_recast
|
||||||
@ -1125,6 +1358,13 @@ impl ObjectExpression {
|
|||||||
}],
|
}],
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rename all identifiers that have the old name to the new given name.
|
||||||
|
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||||
|
for property in &mut self.properties {
|
||||||
|
property.value.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl_value_meta!(ObjectExpression);
|
impl_value_meta!(ObjectExpression);
|
||||||
@ -1342,6 +1582,21 @@ impl MemberExpression {
|
|||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rename all identifiers that have the old name to the new given name.
|
||||||
|
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||||
|
match &mut self.object {
|
||||||
|
MemberObject::MemberExpression(ref mut member_expression) => {
|
||||||
|
member_expression.rename_identifiers(old_name, new_name)
|
||||||
|
}
|
||||||
|
MemberObject::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
|
||||||
|
}
|
||||||
|
|
||||||
|
match &mut self.property {
|
||||||
|
LiteralIdentifier::Identifier(ref mut identifier) => identifier.rename(old_name, new_name),
|
||||||
|
LiteralIdentifier::Literal(_) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
||||||
@ -1370,7 +1625,7 @@ impl BinaryExpression {
|
|||||||
self.operator.precedence()
|
self.operator.precedence()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn recast(&self) -> String {
|
fn recast(&self, options: &FormatOptions) -> String {
|
||||||
let maybe_wrap_it = |a: String, doit: bool| -> String {
|
let maybe_wrap_it = |a: String, doit: bool| -> String {
|
||||||
if doit {
|
if doit {
|
||||||
format!("({})", a)
|
format!("({})", a)
|
||||||
@ -1393,9 +1648,9 @@ impl BinaryExpression {
|
|||||||
|
|
||||||
format!(
|
format!(
|
||||||
"{} {} {}",
|
"{} {} {}",
|
||||||
maybe_wrap_it(self.left.recast(""), should_wrap_left),
|
maybe_wrap_it(self.left.recast(options, 0), should_wrap_left),
|
||||||
self.operator,
|
self.operator,
|
||||||
maybe_wrap_it(self.right.recast(""), should_wrap_right)
|
maybe_wrap_it(self.right.recast(options, 0), should_wrap_right)
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1458,6 +1713,12 @@ impl BinaryExpression {
|
|||||||
}],
|
}],
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rename all identifiers that have the old name to the new given name.
|
||||||
|
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||||
|
self.left.rename_identifiers(old_name, new_name);
|
||||||
|
self.right.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_json_number_as_f64(j: &serde_json::Value, source_range: SourceRange) -> Result<f64, KclError> {
|
pub fn parse_json_number_as_f64(j: &serde_json::Value, source_range: SourceRange) -> Result<f64, KclError> {
|
||||||
@ -1532,8 +1793,8 @@ pub struct UnaryExpression {
|
|||||||
impl_value_meta!(UnaryExpression);
|
impl_value_meta!(UnaryExpression);
|
||||||
|
|
||||||
impl UnaryExpression {
|
impl UnaryExpression {
|
||||||
fn recast(&self) -> String {
|
fn recast(&self, options: &FormatOptions) -> String {
|
||||||
format!("{}{}", &self.operator, self.argument.recast(""))
|
format!("{}{}", &self.operator, self.argument.recast(options, 0))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_result(
|
pub fn get_result(
|
||||||
@ -1565,6 +1826,11 @@ impl UnaryExpression {
|
|||||||
|
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rename all identifiers that have the old name to the new given name.
|
||||||
|
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||||
|
self.argument.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, FromStr, Display)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, FromStr, Display)]
|
||||||
@ -1595,13 +1861,13 @@ pub struct PipeExpression {
|
|||||||
impl_value_meta!(PipeExpression);
|
impl_value_meta!(PipeExpression);
|
||||||
|
|
||||||
impl PipeExpression {
|
impl PipeExpression {
|
||||||
fn recast(&self, indentation: &str) -> String {
|
fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
||||||
self.body
|
self.body
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(index, statement)| {
|
.map(|(index, statement)| {
|
||||||
let indentation = indentation.to_string() + " ";
|
let indentation = options.get_indentation(indentation_level + 1);
|
||||||
let mut s = statement.recast(&indentation, true);
|
let mut s = statement.recast(options, indentation_level + 1, true);
|
||||||
let non_code_meta = self.non_code_meta.clone();
|
let non_code_meta = self.non_code_meta.clone();
|
||||||
if let Some(non_code_meta_value) = non_code_meta.none_code_nodes.get(&index) {
|
if let Some(non_code_meta_value) = non_code_meta.none_code_nodes.get(&index) {
|
||||||
s += non_code_meta_value.format(&indentation).trim_end_matches('\n')
|
s += non_code_meta_value.format(&indentation).trim_end_matches('\n')
|
||||||
@ -1641,6 +1907,13 @@ impl PipeExpression {
|
|||||||
pipe_info.index = 0;
|
pipe_info.index = 0;
|
||||||
execute_pipe_body(memory, &self.body, pipe_info, self.into(), engine)
|
execute_pipe_body(memory, &self.body, pipe_info, self.into(), engine)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Rename all identifiers that have the old name to the new given name.
|
||||||
|
fn rename_identifiers(&mut self, old_name: &str, new_name: &str) {
|
||||||
|
for statement in &mut self.body {
|
||||||
|
statement.rename_identifiers(old_name, new_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn execute_pipe_body(
|
fn execute_pipe_body(
|
||||||
@ -1706,17 +1979,16 @@ pub struct FunctionExpression {
|
|||||||
impl_value_meta!(FunctionExpression);
|
impl_value_meta!(FunctionExpression);
|
||||||
|
|
||||||
impl FunctionExpression {
|
impl FunctionExpression {
|
||||||
pub fn recast(&self, indentation: &str) -> String {
|
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
|
||||||
format!(
|
format!(
|
||||||
"({}) => {{\n{}{}{}\n}}",
|
"({}) => {{\n{}{}\n}}",
|
||||||
self.params
|
self.params
|
||||||
.iter()
|
.iter()
|
||||||
.map(|param| param.name.clone())
|
.map(|param| param.name.clone())
|
||||||
.collect::<Vec<String>>()
|
.collect::<Vec<String>>()
|
||||||
.join(", "),
|
.join(", "),
|
||||||
indentation,
|
options.get_indentation(indentation_level + 1),
|
||||||
" ",
|
self.body.recast(options, indentation_level + 1)
|
||||||
self.body.recast(" ", true)
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1756,6 +2028,58 @@ pub enum Hover {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Format options.
|
||||||
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
|
||||||
|
#[ts(export)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct FormatOptions {
|
||||||
|
/// Size of a tab in spaces.
|
||||||
|
pub tab_size: usize,
|
||||||
|
/// Prefer tabs over spaces.
|
||||||
|
pub use_tabs: bool,
|
||||||
|
/// How to handle the final newline in the file.
|
||||||
|
/// If true, ensure file ends with a newline.
|
||||||
|
/// If false, ensure file does not end with a newline.
|
||||||
|
pub insert_final_newline: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for FormatOptions {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FormatOptions {
|
||||||
|
/// Define the default format options.
|
||||||
|
/// We use 2 spaces for indentation.
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
tab_size: 2,
|
||||||
|
use_tabs: false,
|
||||||
|
insert_final_newline: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the indentation string for the given level.
|
||||||
|
pub fn get_indentation(&self, level: usize) -> String {
|
||||||
|
if self.use_tabs {
|
||||||
|
"\t".repeat(level)
|
||||||
|
} else {
|
||||||
|
" ".repeat(level * self.tab_size)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the indentation string for the given level.
|
||||||
|
/// But offset the pipe operator (and a space) by one level.
|
||||||
|
pub fn get_indentation_offset_pipe(&self, level: usize) -> String {
|
||||||
|
if self.use_tabs {
|
||||||
|
"\t".repeat(level + 1)
|
||||||
|
} else {
|
||||||
|
" ".repeat(level * self.tab_size) + " ".repeat(PIPE_OPERATOR.len() + 1).as_str()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
@ -1797,7 +2121,7 @@ show(part001)"#;
|
|||||||
let some_program: crate::abstract_syntax_tree_types::Program =
|
let some_program: crate::abstract_syntax_tree_types::Program =
|
||||||
serde_json::from_str(some_program_string).unwrap();
|
serde_json::from_str(some_program_string).unwrap();
|
||||||
|
|
||||||
let recasted = some_program.recast("", false);
|
let recasted = some_program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
recasted,
|
recasted,
|
||||||
r#"const part001 = startSketchAt('default')
|
r#"const part001 = startSketchAt('default')
|
||||||
@ -1816,7 +2140,7 @@ show(part001)"#
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
recasted,
|
recasted,
|
||||||
r#"const part001 = startSketchAt([0.0, 5.0])
|
r#"const part001 = startSketchAt([0.0, 5.0])
|
||||||
@ -1834,7 +2158,7 @@ show(part001)"#
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
recasted,
|
recasted,
|
||||||
r#"const part001 = startSketchAt([0.0, 5.0])
|
r#"const part001 = startSketchAt([0.0, 5.0])
|
||||||
@ -1852,7 +2176,7 @@ show(part001)"#
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
recasted,
|
recasted,
|
||||||
r#"const part001 = startSketchAt([0.0, 5.0])
|
r#"const part001 = startSketchAt([0.0, 5.0])
|
||||||
@ -1877,7 +2201,7 @@ show(part001)"#
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
recasted,
|
recasted,
|
||||||
r#"const myFn = () => {
|
r#"const myFn = () => {
|
||||||
@ -1913,7 +2237,7 @@ const mySk1 = startSketchAt([0, 0])
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
recasted,
|
recasted,
|
||||||
r#"// comment at start
|
r#"// comment at start
|
||||||
@ -1951,7 +2275,7 @@ show(part001)"#;
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string);
|
assert_eq!(recasted, some_program_string);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1964,12 +2288,19 @@ const yo = {
|
|||||||
anum: 2,
|
anum: 2,
|
||||||
identifier: three,
|
identifier: three,
|
||||||
binExp: 4 + 5
|
binExp: 4 + 5
|
||||||
}"#;
|
}
|
||||||
|
const yo = [
|
||||||
|
1,
|
||||||
|
" 2,",
|
||||||
|
"three",
|
||||||
|
4 + 5,
|
||||||
|
" hey oooooo really long long long"
|
||||||
|
]"#;
|
||||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string);
|
assert_eq!(recasted, some_program_string);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1987,7 +2318,7 @@ const things = "things"
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string.trim());
|
assert_eq!(recasted, some_program_string.trim());
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2005,7 +2336,125 @@ const things = "things"
|
|||||||
let parser = crate::parser::Parser::new(tokens);
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
let program = parser.ast().unwrap();
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
let recasted = program.recast("", false);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string.trim());
|
assert_eq!(recasted, some_program_string.trim());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_recast_array_new_line_in_pipe() {
|
||||||
|
let some_program_string = r#"const myVar = 3
|
||||||
|
const myVar2 = 5
|
||||||
|
const myVar3 = 6
|
||||||
|
const myAng = 40
|
||||||
|
const myAng2 = 134
|
||||||
|
const part001 = startSketchAt([0, 0])
|
||||||
|
|> line({ to: [1, 3.82], tag: 'seg01' }, %) // ln-should-get-tag
|
||||||
|
|> angledLineToX([
|
||||||
|
-angleToMatchLengthX('seg01', myVar, %),
|
||||||
|
myVar
|
||||||
|
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|
||||||
|
|> angledLineToY([
|
||||||
|
-angleToMatchLengthY('seg01', myVar, %),
|
||||||
|
myVar
|
||||||
|
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
|
||||||
|
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
|
assert_eq!(recasted, some_program_string);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_recast_array_new_line_in_pipe_custom() {
|
||||||
|
let some_program_string = r#"const myVar = 3
|
||||||
|
const myVar2 = 5
|
||||||
|
const myVar3 = 6
|
||||||
|
const myAng = 40
|
||||||
|
const myAng2 = 134
|
||||||
|
const part001 = startSketchAt([0, 0])
|
||||||
|
|> line({ to: [1, 3.82], tag: 'seg01' }, %) // ln-should-get-tag
|
||||||
|
|> angledLineToX([
|
||||||
|
-angleToMatchLengthX('seg01', myVar, %),
|
||||||
|
myVar
|
||||||
|
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|
||||||
|
|> angledLineToY([
|
||||||
|
-angleToMatchLengthY('seg01', myVar, %),
|
||||||
|
myVar
|
||||||
|
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|
||||||
|
"#;
|
||||||
|
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
|
let recasted = program.recast(
|
||||||
|
&FormatOptions {
|
||||||
|
tab_size: 3,
|
||||||
|
use_tabs: false,
|
||||||
|
insert_final_newline: true,
|
||||||
|
},
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
assert_eq!(recasted, some_program_string);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_recast_after_rename_std() {
|
||||||
|
let some_program_string = r#"const part001 = startSketchAt([0.0000000000, 5.0000000000])
|
||||||
|
|> line([0.4900857016, -0.0240763666], %)
|
||||||
|
|
||||||
|
const part002 = "part002"
|
||||||
|
const things = [part001, 0.0]
|
||||||
|
let blah = 1
|
||||||
|
const foo = false
|
||||||
|
let baz = {a: 1, part001: "thing"}
|
||||||
|
|
||||||
|
fn ghi = (part001) => {
|
||||||
|
return part001
|
||||||
|
}
|
||||||
|
|
||||||
|
show(part001)"#;
|
||||||
|
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let mut program = parser.ast().unwrap();
|
||||||
|
program.rename_symbol("mySuperCoolPart", 6);
|
||||||
|
|
||||||
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
|
assert_eq!(
|
||||||
|
recasted,
|
||||||
|
r#"const mySuperCoolPart = startSketchAt([0.0, 5.0])
|
||||||
|
|> line([0.4900857016, -0.0240763666], %)
|
||||||
|
|
||||||
|
const part002 = "part002"
|
||||||
|
const things = [mySuperCoolPart, 0.0]
|
||||||
|
let blah = 1
|
||||||
|
const foo = false
|
||||||
|
let baz = { a: 1, part001: "thing" }
|
||||||
|
|
||||||
|
fn ghi = (part001) => {
|
||||||
|
return part001
|
||||||
|
}
|
||||||
|
|
||||||
|
show(mySuperCoolPart)"#
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_recast_after_rename_fn_args() {
|
||||||
|
let some_program_string = r#"fn ghi = (x, y, z) => {
|
||||||
|
return x
|
||||||
|
}"#;
|
||||||
|
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let mut program = parser.ast().unwrap();
|
||||||
|
program.rename_symbol("newName", 10);
|
||||||
|
|
||||||
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
|
assert_eq!(
|
||||||
|
recasted,
|
||||||
|
r#"fn ghi = (newName, y, z) => {
|
||||||
|
return newName
|
||||||
|
}"#
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -642,7 +642,7 @@ pub fn execute(
|
|||||||
for (index, param) in function_expression.params.iter().enumerate() {
|
for (index, param) in function_expression.params.iter().enumerate() {
|
||||||
fn_memory.add(
|
fn_memory.add(
|
||||||
¶m.name,
|
¶m.name,
|
||||||
args.clone().get(index).unwrap().clone(),
|
args.get(index).unwrap().clone(),
|
||||||
param.into(),
|
param.into(),
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
|
@ -228,8 +228,8 @@ impl ReversePolishNotation {
|
|||||||
.collect::<Vec<Token>>(),
|
.collect::<Vec<Token>>(),
|
||||||
);
|
);
|
||||||
return rpn.parse();
|
return rpn.parse();
|
||||||
} else if current_token.value == ")" {
|
} else if current_token.value == ")" && !self.operators.is_empty() {
|
||||||
if !self.operators.is_empty() && self.operators[self.operators.len() - 1].value != "(" {
|
if self.operators[self.operators.len() - 1].value != "(" {
|
||||||
// pop operators off the stack and push them to postFix until we find the matching '('
|
// pop operators off the stack and push them to postFix until we find the matching '('
|
||||||
let rpn = ReversePolishNotation::new(
|
let rpn = ReversePolishNotation::new(
|
||||||
&self.parser.tokens,
|
&self.parser.tokens,
|
||||||
|
@ -336,17 +336,26 @@ impl Parser {
|
|||||||
value: if start_end_string.starts_with("\n\n") && is_new_line_comment {
|
value: if start_end_string.starts_with("\n\n") && is_new_line_comment {
|
||||||
// Preserve if they want a whitespace line before the comment.
|
// Preserve if they want a whitespace line before the comment.
|
||||||
// But let's just allow one.
|
// But let's just allow one.
|
||||||
NoneCodeValue::NewLineBlock { value: full_string }
|
NoneCodeValue::NewLineBlockComment { value: full_string }
|
||||||
} else if is_new_line_comment {
|
} else if is_new_line_comment {
|
||||||
NoneCodeValue::Block { value: full_string }
|
NoneCodeValue::BlockComment { value: full_string }
|
||||||
} else {
|
} else {
|
||||||
NoneCodeValue::Inline { value: full_string }
|
NoneCodeValue::InlineComment { value: full_string }
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
Ok((Some(node), end_index - 1))
|
Ok((Some(node), end_index - 1))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn next_meaningful_token(&self, index: usize, offset: Option<usize>) -> Result<TokenReturnWithNonCode, KclError> {
|
fn next_meaningful_token(&self, index: usize, offset: Option<usize>) -> Result<TokenReturnWithNonCode, KclError> {
|
||||||
|
// There is no next meaningful token.
|
||||||
|
if index >= self.tokens.len() - 1 {
|
||||||
|
return Ok(TokenReturnWithNonCode {
|
||||||
|
token: None,
|
||||||
|
index: self.tokens.len() - 1,
|
||||||
|
non_code_node: None,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
let new_index = index + offset.unwrap_or(1);
|
let new_index = index + offset.unwrap_or(1);
|
||||||
let Ok(token) = self.get_token(new_index) else {
|
let Ok(token) = self.get_token(new_index) else {
|
||||||
return Ok(TokenReturnWithNonCode {
|
return Ok(TokenReturnWithNonCode {
|
||||||
@ -405,7 +414,7 @@ impl Parser {
|
|||||||
if found_another_opening_brace {
|
if found_another_opening_brace {
|
||||||
return self.find_closing_brace(index + 1, brace_count + 1, search_opening_brace);
|
return self.find_closing_brace(index + 1, brace_count + 1, search_opening_brace);
|
||||||
}
|
}
|
||||||
if found_another_closing_brace {
|
if found_another_closing_brace && brace_count > 0 {
|
||||||
return self.find_closing_brace(index + 1, brace_count - 1, search_opening_brace);
|
return self.find_closing_brace(index + 1, brace_count - 1, search_opening_brace);
|
||||||
}
|
}
|
||||||
// non-brace token, increment and continue
|
// non-brace token, increment and continue
|
||||||
@ -610,6 +619,12 @@ impl Parser {
|
|||||||
fn make_member_expression(&self, index: usize) -> Result<MemberExpressionReturn, KclError> {
|
fn make_member_expression(&self, index: usize) -> Result<MemberExpressionReturn, KclError> {
|
||||||
let current_token = self.get_token(index)?;
|
let current_token = self.get_token(index)?;
|
||||||
let mut keys_info = self.collect_object_keys(index, None)?;
|
let mut keys_info = self.collect_object_keys(index, None)?;
|
||||||
|
if keys_info.is_empty() {
|
||||||
|
return Err(KclError::Syntax(KclErrorDetails {
|
||||||
|
source_ranges: vec![current_token.into()],
|
||||||
|
message: "expected to be started on a identifier or literal".to_string(),
|
||||||
|
}));
|
||||||
|
}
|
||||||
let last_key = keys_info[keys_info.len() - 1].clone();
|
let last_key = keys_info[keys_info.len() - 1].clone();
|
||||||
let first_key = keys_info.remove(0);
|
let first_key = keys_info.remove(0);
|
||||||
let root = self.make_identifier(index)?;
|
let root = self.make_identifier(index)?;
|
||||||
@ -679,10 +694,14 @@ impl Parser {
|
|||||||
return Ok(index);
|
return Ok(index);
|
||||||
}
|
}
|
||||||
let next_right = self.next_meaningful_token(maybe_operator.index, None)?;
|
let next_right = self.next_meaningful_token(maybe_operator.index, None)?;
|
||||||
|
if next_right.index != index {
|
||||||
self.find_end_of_binary_expression(next_right.index)
|
self.find_end_of_binary_expression(next_right.index)
|
||||||
} else {
|
} else {
|
||||||
Ok(index)
|
Ok(index)
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
Ok(index)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_value(&self, index: usize) -> Result<ValueReturn, KclError> {
|
fn make_value(&self, index: usize) -> Result<ValueReturn, KclError> {
|
||||||
@ -847,6 +866,8 @@ impl Parser {
|
|||||||
fn make_array_expression(&self, index: usize) -> Result<ArrayReturn, KclError> {
|
fn make_array_expression(&self, index: usize) -> Result<ArrayReturn, KclError> {
|
||||||
let opening_brace_token = self.get_token(index)?;
|
let opening_brace_token = self.get_token(index)?;
|
||||||
let first_element_token = self.next_meaningful_token(index, None)?;
|
let first_element_token = self.next_meaningful_token(index, None)?;
|
||||||
|
// Make sure there is a closing brace.
|
||||||
|
let _closing_brace = self.find_closing_brace(index, 0, "")?;
|
||||||
let array_elements = self.make_array_elements(first_element_token.index, Vec::new())?;
|
let array_elements = self.make_array_elements(first_element_token.index, Vec::new())?;
|
||||||
Ok(ArrayReturn {
|
Ok(ArrayReturn {
|
||||||
expression: ArrayExpression {
|
expression: ArrayExpression {
|
||||||
@ -1063,6 +1084,8 @@ impl Parser {
|
|||||||
let current_token = self.get_token(index)?;
|
let current_token = self.get_token(index)?;
|
||||||
let brace_token = self.next_meaningful_token(index, None)?;
|
let brace_token = self.next_meaningful_token(index, None)?;
|
||||||
let callee = self.make_identifier(index)?;
|
let callee = self.make_identifier(index)?;
|
||||||
|
// Make sure there is a closing brace.
|
||||||
|
let _closing_brace_token = self.find_closing_brace(brace_token.index, 0, "")?;
|
||||||
let args = self.make_arguments(brace_token.index, vec![])?;
|
let args = self.make_arguments(brace_token.index, vec![])?;
|
||||||
let closing_brace_token = self.get_token(args.last_index)?;
|
let closing_brace_token = self.get_token(args.last_index)?;
|
||||||
let function = if let Some(stdlib_fn) = self.stdlib.get(&callee.name) {
|
let function = if let Some(stdlib_fn) = self.stdlib.get(&callee.name) {
|
||||||
@ -1105,7 +1128,13 @@ impl Parser {
|
|||||||
) -> Result<VariableDeclaratorsReturn, KclError> {
|
) -> Result<VariableDeclaratorsReturn, KclError> {
|
||||||
let current_token = self.get_token(index)?;
|
let current_token = self.get_token(index)?;
|
||||||
let assignment = self.next_meaningful_token(index, None)?;
|
let assignment = self.next_meaningful_token(index, None)?;
|
||||||
if let Some(assignment_token) = assignment.token {
|
let Some(assignment_token) = assignment.token else {
|
||||||
|
return Err(KclError::Unimplemented(KclErrorDetails {
|
||||||
|
source_ranges: vec![current_token.clone().into()],
|
||||||
|
message: format!("Unexpected token {}", current_token.value),
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
let contents_start_token = self.next_meaningful_token(assignment.index, None)?;
|
let contents_start_token = self.next_meaningful_token(assignment.index, None)?;
|
||||||
let pipe_start_index = if assignment_token.token_type == TokenType::Operator {
|
let pipe_start_index = if assignment_token.token_type == TokenType::Operator {
|
||||||
contents_start_token.index
|
contents_start_token.index
|
||||||
@ -1135,12 +1164,6 @@ impl Parser {
|
|||||||
declarations,
|
declarations,
|
||||||
last_index,
|
last_index,
|
||||||
})
|
})
|
||||||
} else {
|
|
||||||
Err(KclError::Unimplemented(KclErrorDetails {
|
|
||||||
source_ranges: vec![current_token.clone().into()],
|
|
||||||
message: format!("Unexpected token {} ", current_token.value),
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_variable_declaration(&self, index: usize) -> Result<VariableDeclarationResult, KclError> {
|
fn make_variable_declaration(&self, index: usize) -> Result<VariableDeclarationResult, KclError> {
|
||||||
@ -1192,6 +1215,12 @@ impl Parser {
|
|||||||
fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
|
fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
|
||||||
let current_token = self.get_token(index)?;
|
let current_token = self.get_token(index)?;
|
||||||
let next_token = self.next_meaningful_token(index, None)?;
|
let next_token = self.next_meaningful_token(index, None)?;
|
||||||
|
if next_token.token.is_none() {
|
||||||
|
return Err(KclError::Syntax(KclErrorDetails {
|
||||||
|
source_ranges: vec![current_token.into()],
|
||||||
|
message: "expected another token".to_string(),
|
||||||
|
}));
|
||||||
|
}
|
||||||
let argument = self.make_value(next_token.index)?;
|
let argument = self.make_value(next_token.index)?;
|
||||||
let argument_token = self.get_token(argument.last_index)?;
|
let argument_token = self.get_token(argument.last_index)?;
|
||||||
Ok(UnaryExpressionResult {
|
Ok(UnaryExpressionResult {
|
||||||
@ -1232,7 +1261,6 @@ impl Parser {
|
|||||||
return Ok(ExpressionStatementResult {
|
return Ok(ExpressionStatementResult {
|
||||||
expression: ExpressionStatement {
|
expression: ExpressionStatement {
|
||||||
start: current_token.start,
|
start: current_token.start,
|
||||||
// end: call_expression.last_index,
|
|
||||||
end,
|
end,
|
||||||
expression: Value::CallExpression(Box::new(call_expression.expression)),
|
expression: Value::CallExpression(Box::new(call_expression.expression)),
|
||||||
},
|
},
|
||||||
@ -1314,6 +1342,8 @@ impl Parser {
|
|||||||
|
|
||||||
fn make_object_expression(&self, index: usize) -> Result<ObjectExpressionResult, KclError> {
|
fn make_object_expression(&self, index: usize) -> Result<ObjectExpressionResult, KclError> {
|
||||||
let opening_brace_token = self.get_token(index)?;
|
let opening_brace_token = self.get_token(index)?;
|
||||||
|
// Make sure there is a closing brace.
|
||||||
|
let _closing_brace = self.find_closing_brace(index, 0, "")?;
|
||||||
let first_property_token = self.next_meaningful_token(index, None)?;
|
let first_property_token = self.next_meaningful_token(index, None)?;
|
||||||
let object_properties = self.make_object_properties(first_property_token.index, vec![])?;
|
let object_properties = self.make_object_properties(first_property_token.index, vec![])?;
|
||||||
Ok(ObjectExpressionResult {
|
Ok(ObjectExpressionResult {
|
||||||
@ -1665,7 +1695,7 @@ const key = 'c'"#,
|
|||||||
Some(NoneCodeNode {
|
Some(NoneCodeNode {
|
||||||
start: 38,
|
start: 38,
|
||||||
end: 60,
|
end: 60,
|
||||||
value: NoneCodeValue::Block {
|
value: NoneCodeValue::BlockComment {
|
||||||
value: "this is a comment".to_string(),
|
value: "this is a comment".to_string(),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
@ -1687,7 +1717,7 @@ const key = 'c'"#,
|
|||||||
Some(NoneCodeNode {
|
Some(NoneCodeNode {
|
||||||
start: 106,
|
start: 106,
|
||||||
end: 166,
|
end: 166,
|
||||||
value: NoneCodeValue::Block {
|
value: NoneCodeValue::BlockComment {
|
||||||
value: "this is\n a comment\n spanning a few lines".to_string(),
|
value: "this is\n a comment\n spanning a few lines".to_string(),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
@ -2716,4 +2746,139 @@ show(mySk1)"#;
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert!(result.err().unwrap().to_string().contains("file is empty"));
|
assert!(result.err().unwrap().to_string().contains("file is empty"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_half_pipe_small() {
|
||||||
|
let tokens = crate::tokeniser::lexer(
|
||||||
|
"const secondExtrude = startSketchAt([0,0])
|
||||||
|
|",
|
||||||
|
);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_half_pipe() {
|
||||||
|
let tokens = crate::tokeniser::lexer(
|
||||||
|
"const height = 10
|
||||||
|
|
||||||
|
const firstExtrude = startSketchAt([0,0])
|
||||||
|
|> line([0, 8], %)
|
||||||
|
|> line([20, 0], %)
|
||||||
|
|> line([0, -8], %)
|
||||||
|
|> close(%)
|
||||||
|
|> extrude(2, %)
|
||||||
|
|
||||||
|
show(firstExtrude)
|
||||||
|
|
||||||
|
const secondExtrude = startSketchAt([0,0])
|
||||||
|
|",
|
||||||
|
);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_greater_bang() {
|
||||||
|
let tokens = crate::tokeniser::lexer(">!");
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_z_percent_parens() {
|
||||||
|
let tokens = crate::tokeniser::lexer("z%)");
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_parens_unicode() {
|
||||||
|
let tokens = crate::tokeniser::lexer("(ޜ");
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_ok());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_nested_open_brackets() {
|
||||||
|
let tokens = crate::tokeniser::lexer(
|
||||||
|
r#"
|
||||||
|
z(-[["#,
|
||||||
|
);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("unexpected end"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_weird_new_line_function() {
|
||||||
|
let tokens = crate::tokeniser::lexer(
|
||||||
|
r#"z
|
||||||
|
(--#"#,
|
||||||
|
);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("unexpected end"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_weird_lots_of_fancy_brackets() {
|
||||||
|
let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("unexpected end"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_weird_close_before_open() {
|
||||||
|
let tokens = crate::tokeniser::lexer(
|
||||||
|
r#"fn)n
|
||||||
|
e
|
||||||
|
["#,
|
||||||
|
);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result
|
||||||
|
.err()
|
||||||
|
.unwrap()
|
||||||
|
.to_string()
|
||||||
|
.contains("expected to be started on a identifier or literal"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_weird_close_before_nada() {
|
||||||
|
let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result.err().unwrap().to_string().contains("expected another token"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_weird_lots_of_slashes() {
|
||||||
|
let tokens = crate::tokeniser::lexer(
|
||||||
|
r#"J///////////o//+///////////P++++*++++++P///////˟
|
||||||
|
++4"#,
|
||||||
|
);
|
||||||
|
let parser = Parser::new(tokens);
|
||||||
|
let result = parser.ast();
|
||||||
|
assert!(result.is_err());
|
||||||
|
assert!(result
|
||||||
|
.err()
|
||||||
|
.unwrap()
|
||||||
|
.to_string()
|
||||||
|
.contains("unexpected end of expression"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -233,6 +233,7 @@ impl LanguageServer for Backend {
|
|||||||
document_symbol_provider: Some(OneOf::Left(true)),
|
document_symbol_provider: Some(OneOf::Left(true)),
|
||||||
hover_provider: Some(HoverProviderCapability::Simple(true)),
|
hover_provider: Some(HoverProviderCapability::Simple(true)),
|
||||||
inlay_hint_provider: Some(OneOf::Left(true)),
|
inlay_hint_provider: Some(OneOf::Left(true)),
|
||||||
|
rename_provider: Some(OneOf::Left(true)),
|
||||||
semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
|
semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
|
||||||
SemanticTokensRegistrationOptions {
|
SemanticTokensRegistrationOptions {
|
||||||
text_document_registration_options: {
|
text_document_registration_options: {
|
||||||
@ -552,19 +553,14 @@ impl LanguageServer for Backend {
|
|||||||
return Ok(None);
|
return Ok(None);
|
||||||
};
|
};
|
||||||
// Now recast it.
|
// Now recast it.
|
||||||
// Make spaces for the tab size.
|
let recast = ast.recast(
|
||||||
/*let mut tab_size = String::new();
|
&crate::abstract_syntax_tree_types::FormatOptions {
|
||||||
for _ in 0..params.options.tab_size {
|
tab_size: params.options.tab_size as usize,
|
||||||
tab_size.push(' ');
|
insert_final_newline: params.options.insert_final_newline.unwrap_or(false),
|
||||||
}*/
|
use_tabs: !params.options.insert_spaces,
|
||||||
// TODO: use the tab size.
|
},
|
||||||
let mut recast = ast.recast("", false).trim().to_string();
|
0,
|
||||||
if let Some(insert_final_newline) = params.options.insert_final_newline {
|
);
|
||||||
if insert_final_newline {
|
|
||||||
recast.push('\n');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let source_range = SourceRange([0, current_code.len() - 1]);
|
let source_range = SourceRange([0, current_code.len() - 1]);
|
||||||
let range = source_range.to_lsp_range(¤t_code);
|
let range = source_range.to_lsp_range(¤t_code);
|
||||||
Ok(Some(vec![TextEdit {
|
Ok(Some(vec![TextEdit {
|
||||||
@ -572,6 +568,43 @@ impl LanguageServer for Backend {
|
|||||||
range,
|
range,
|
||||||
}]))
|
}]))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
|
||||||
|
let filename = params.text_document_position.text_document.uri.to_string();
|
||||||
|
|
||||||
|
let Some(current_code) = self.current_code_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parse the ast.
|
||||||
|
// I don't know if we need to do this again since it should be updated in the context.
|
||||||
|
// But I figure better safe than sorry since this will write back out to the file.
|
||||||
|
let tokens = crate::tokeniser::lexer(¤t_code);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let Ok(mut ast) = parser.ast() else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Let's convert the position to a character index.
|
||||||
|
let pos = position_to_char_index(params.text_document_position.position, ¤t_code);
|
||||||
|
// Now let's perform the rename on the ast.
|
||||||
|
ast.rename_symbol(¶ms.new_name, pos);
|
||||||
|
// Now recast it.
|
||||||
|
let recast = ast.recast(&Default::default(), 0);
|
||||||
|
let source_range = SourceRange([0, current_code.len() - 1]);
|
||||||
|
let range = source_range.to_lsp_range(¤t_code);
|
||||||
|
Ok(Some(WorkspaceEdit {
|
||||||
|
changes: Some(HashMap::from([(
|
||||||
|
params.text_document_position.text_document.uri,
|
||||||
|
vec![TextEdit {
|
||||||
|
new_text: recast,
|
||||||
|
range,
|
||||||
|
}],
|
||||||
|
)])),
|
||||||
|
document_changes: None,
|
||||||
|
change_annotations: None,
|
||||||
|
}))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get completions from our stdlib.
|
/// Get completions from our stdlib.
|
||||||
|
@ -206,8 +206,8 @@ fn is_block_comment(character: &str) -> bool {
|
|||||||
BLOCKCOMMENT.is_match(character)
|
BLOCKCOMMENT.is_match(character)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn match_first(str: &str, regex: &Regex) -> Option<String> {
|
fn match_first(s: &str, regex: &Regex) -> Option<String> {
|
||||||
regex.find(str).map(|the_match| the_match.as_str().to_string())
|
regex.find(s).map(|the_match| the_match.as_str().to_string())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
|
fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
|
||||||
@ -219,8 +219,8 @@ fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
|
fn return_token_at_index(s: &str, start_index: usize) -> Option<Token> {
|
||||||
let str_from_index = &str[start_index..];
|
let str_from_index = &s.chars().skip(start_index).collect::<String>();
|
||||||
if is_string(str_from_index) {
|
if is_string(str_from_index) {
|
||||||
return Some(make_token(
|
return Some(make_token(
|
||||||
TokenType::String,
|
TokenType::String,
|
||||||
@ -348,21 +348,22 @@ fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn lexer(str: &str) -> Vec<Token> {
|
fn recursively_tokenise(s: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
|
||||||
fn recursively_tokenise(str: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
|
if current_index >= s.len() {
|
||||||
if current_index >= str.len() {
|
|
||||||
return previous_tokens;
|
return previous_tokens;
|
||||||
}
|
}
|
||||||
let token = return_token_at_index(str, current_index);
|
let token = return_token_at_index(s, current_index);
|
||||||
let Some(token) = token else {
|
let Some(token) = token else {
|
||||||
return recursively_tokenise(str, current_index + 1, previous_tokens);
|
return recursively_tokenise(s, current_index + 1, previous_tokens);
|
||||||
};
|
};
|
||||||
let mut new_tokens = previous_tokens;
|
let mut new_tokens = previous_tokens;
|
||||||
let token_length = token.value.len();
|
let token_length = token.value.len();
|
||||||
new_tokens.push(token);
|
new_tokens.push(token);
|
||||||
recursively_tokenise(str, current_index + token_length, new_tokens)
|
recursively_tokenise(s, current_index + token_length, new_tokens)
|
||||||
}
|
}
|
||||||
recursively_tokenise(str, 0, Vec::new())
|
|
||||||
|
pub fn lexer(s: &str) -> Vec<Token> {
|
||||||
|
recursively_tokenise(s, 0, Vec::new())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -76,7 +76,8 @@ pub fn recast_wasm(json_str: &str) -> Result<JsValue, JsError> {
|
|||||||
let program: kcl_lib::abstract_syntax_tree_types::Program =
|
let program: kcl_lib::abstract_syntax_tree_types::Program =
|
||||||
serde_json::from_str(json_str).map_err(JsError::from)?;
|
serde_json::from_str(json_str).map_err(JsError::from)?;
|
||||||
|
|
||||||
let result = program.recast("", false);
|
// Use the default options until we integrate into the UI the ability to change them.
|
||||||
|
let result = program.recast(&Default::default(), 0);
|
||||||
Ok(JsValue::from_serde(&result)?)
|
Ok(JsValue::from_serde(&result)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1530,10 +1530,10 @@
|
|||||||
resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
|
resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
|
||||||
integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==
|
integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==
|
||||||
|
|
||||||
"@kittycad/lib@^0.0.35":
|
"@kittycad/lib@^0.0.36":
|
||||||
version "0.0.35"
|
version "0.0.36"
|
||||||
resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.35.tgz#bde8868048f9fd53f8309e7308aeba622898b935"
|
resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.36.tgz#7b9676c975bc629f227d41897b38e7d73280db71"
|
||||||
integrity sha512-qM8AyP2QUlDfPWNxb1Fs/Pq9AebGVDN1OHjByxbGomKCy0jFdN2TsyDdhQH/CAZGfBCgPEfr5bq6rkUBGSXcNw==
|
integrity sha512-4bVXTaIzpSRuJAuLbAD/CWWTns7H/IxogPj0827n8mwXDkj+65EBCNXhJGWRkMG2CeTVJVk1LSWKlaHE+ToxGA==
|
||||||
dependencies:
|
dependencies:
|
||||||
node-fetch "3.3.2"
|
node-fetch "3.3.2"
|
||||||
openapi-types "^12.0.0"
|
openapi-types "^12.0.0"
|
||||||
|
Reference in New Issue
Block a user