Compare commits
19 Commits

3da6fc3b7e
34dd15ead7
b3d441e9d6
4b3dc3756c
10027b98b5
da17dad63b
fba6c422a8
0b4b93932d
f42900ec46
eeca624ba6
84d08bad16
1181f33e9d
797e200d08
d2f231066b
86d40c964f
2604449239
e992a96d3b
22c4406105
ad3f0fda6a
@@ -1 +1 @@
-src/wasm-lib/pkg/wasm_lib.js
+src/wasm-lib/*
65 .github/workflows/ci.yml (vendored)
@@ -13,17 +13,31 @@ jobs:
   check-format:
     runs-on: 'ubuntu-20.04'
     steps:

      - uses: actions/checkout@v3

      - uses: actions/setup-node@v3
        with:
          node-version-file: '.nvmrc'
+         cache: 'yarn'
      - run: yarn install

      - run: yarn fmt-check

+  check-types:
+    runs-on: ubuntu-20.04
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3
+        with:
+          node-version-file: '.nvmrc'
+          cache: 'yarn'
+      - run: yarn install
+      - uses: Swatinem/rust-cache@v2
+        with:
+          workspaces: "./src/wasm-lib"
+
+      - run: yarn build:wasm
+      - run: yarn tsc
+
+
   build-test-web:
     runs-on: ubuntu-20.04
@@ -36,12 +50,15 @@ jobs:
      - uses: actions/setup-node@v3
        with:
          node-version-file: '.nvmrc'
+         cache: 'yarn'

      - run: yarn install

-     - run: yarn build:wasm
+     - uses: Swatinem/rust-cache@v2
+       with:
+         workspaces: "./src/wasm-lib"

-     - run: yarn tsc
+     - run: yarn build:wasm

      - run: yarn simpleserver:ci

@@ -49,14 +66,12 @@ jobs:

      - run: yarn test:cov

-     - run: yarn test:rust
-
      - id: export_version
        run: echo "version=`cat package.json | jq -r '.version'`" >> "$GITHUB_OUTPUT"


   build-apps:
-    needs: [check-format, build-test-web]
+    needs: [check-format, build-test-web, check-types]
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
@@ -87,6 +102,10 @@ jobs:
        with:
          workspaces: './src-tauri -> target'

+      - uses: Swatinem/rust-cache@v2
+        with:
+          workspaces: "./src/wasm-lib"
+
      - name: wasm prep
        shell: bash
        run: |
@@ -110,22 +129,27 @@ jobs:
      - name: Fix format
        run: yarn fmt

+      - name: install apple silicon target mac
+        if: matrix.os == 'macos-latest'
+        run: |
+          rustup target add aarch64-apple-darwin
+
      - name: Build the app for the current platform (no upload)
        uses: tauri-apps/tauri-action@v0
        env:
          TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
          TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
+        with:
+          args: ${{ matrix.os == 'macos-latest' && '--target universal-apple-darwin' || '' }}

      - uses: actions/upload-artifact@v3
        with:
-         path: src-tauri/target/release/bundle/*/*
+         path: ${{ matrix.os == 'macos-latest' && 'src-tauri/target/universal-apple-darwin/release/bundle/*/*' || 'src-tauri/target/release/bundle/*/*' }}


   publish-apps-release:
     runs-on: ubuntu-20.04
     if: github.event_name == 'release'
-    permissions:
-      contents: write
     needs: [build-test-web, build-apps]
     env:
       VERSION_NO_V: ${{ needs.build-test-web.outputs.version }}
@@ -135,8 +159,7 @@ jobs:

      - name: Generate the update static endpoint
        run: |
-         ls -l artifact
-         ls -l artifact/*
+         ls -l artifact/*/*itty*
          DARWIN_SIG=`cat artifact/macos/*.app.tar.gz.sig`
          LINUX_SIG=`cat artifact/appimage/*.AppImage.tar.gz.sig`
          WINDOWS_SIG=`cat artifact/nsis/*.nsis.zip.sig`
@@ -144,11 +167,11 @@ jobs:
          jq --null-input \
            --arg version "v${VERSION_NO_V}" \
            --arg darwin_sig "$DARWIN_SIG" \
-           --arg darwin_url "$RELEASE_DIR/macos/kittycad-modeling-app.app.tar.gz" \
+           --arg darwin_url "$RELEASE_DIR/macos/KittyCAD%20Modeling.app.tar.gz" \
            --arg linux_sig "$LINUX_SIG" \
-           --arg linux_url "$RELEASE_DIR/appimage/kittycad-modeling-app_${VERSION_NO_V}_amd64.AppImage.tar.gz" \
+           --arg linux_url "$RELEASE_DIR/appimage/kittycad-modeling_${VERSION_NO_V}_amd64.AppImage.tar.gz" \
            --arg windows_sig "$WINDOWS_SIG" \
-           --arg windows_url "$RELEASE_DIR/nsis/kittycad-modeling-app_${VERSION_NO_V}_x64-setup.nsis.zip" \
+           --arg windows_url "$RELEASE_DIR/nsis/KittyCAD%20Modeling_${VERSION_NO_V}_x64-setup.nsis.zip" \
            '{
              "version": $version,
              "platforms": {
@@ -156,6 +179,10 @@ jobs:
                "signature": $darwin_sig,
                "url": $darwin_url
              },
+             "darwin-aarch64": {
+               "signature": $darwin_sig,
+               "url": $darwin_url
+             },
              "linux-x86_64": {
                "signature": $linux_sig,
                "url": $linux_url
@@ -182,7 +209,7 @@ jobs:
        uses: google-github-actions/upload-cloud-storage@v1.0.3
        with:
          path: artifact
-         glob: '*/kittycad-modeling-app*'
+         glob: '*/*itty*'
          parent: false
          destination: dl.kittycad.io/releases/modeling-app/v${{ env.VERSION_NO_V }}

@@ -195,4 +222,4 @@ jobs:
      - name: Upload release files to Github
        uses: softprops/action-gh-release@v1
        with:
-         files: artifact/*/kittycad-modeling-app*
+         files: artifact/*/*itty*
@@ -5,3 +5,5 @@ coverage
 # Ignore Rust projects:
 *.rs
 target
+src/wasm-lib/pkg
+src/wasm-lib/kcl/bindings
21 README.md

@@ -86,3 +86,24 @@ The PR may serve as a place to discuss the human-readable changelog and extra QA
 3. Create a new release and tag pointing to the bump version commit using semantic versioning `v{x}.{y}.{z}`

 4. A new Action kicks in at https://github.com/KittyCAD/modeling-app/actions, uploading artifacts to the release
+
+## Fuzzing the parser
+
+Make sure you install cargo fuzz:
+
+```bash
+$ cargo install cargo-fuzz
+```
+
+```bash
+$ cd src/wasm-lib/kcl
+
+# list the fuzz targets
+$ cargo fuzz list
+
+# run the parser fuzzer
+$ cargo +nightly fuzz run parser
+```
+
+For more information on fuzzing you can check out
+[this guide](https://rust-fuzz.github.io/book/cargo-fuzz.html).
22642 docs/kcl.json (file diff suppressed because it is too large)
5427 docs/kcl.md (file diff suppressed because it is too large)
15 package.json

@@ -1,31 +1,35 @@
 {
   "name": "untitled-app",
-  "version": "0.3.0",
+  "version": "0.4.0",
   "private": true,
   "dependencies": {
+    "@codemirror/autocomplete": "^6.9.0",
     "@fortawesome/fontawesome-svg-core": "^6.4.2",
     "@fortawesome/free-brands-svg-icons": "^6.4.2",
     "@fortawesome/free-solid-svg-icons": "^6.4.2",
     "@fortawesome/react-fontawesome": "^0.2.0",
     "@headlessui/react": "^1.7.13",
     "@headlessui/tailwindcss": "^0.2.0",
-    "@kittycad/lib": "^0.0.35",
+    "@kittycad/lib": "^0.0.36",
+    "@lezer/javascript": "^1.4.7",
+    "@open-rpc/client-js": "^1.8.1",
     "@react-hook/resize-observer": "^1.2.6",
     "@sentry/react": "^7.65.0",
     "@tauri-apps/api": "^1.3.0",
     "@testing-library/jest-dom": "^5.14.1",
     "@testing-library/react": "^13.0.0",
     "@testing-library/user-event": "^13.2.1",
+    "@ts-stack/markdown": "^1.5.0",
     "@types/node": "^16.7.13",
     "@types/react": "^18.0.0",
     "@types/react-dom": "^18.0.0",
-    "@uiw/codemirror-extensions-langs": "^4.21.9",
-    "@uiw/react-codemirror": "^4.15.1",
+    "@uiw/react-codemirror": "^4.21.13",
     "@xstate/react": "^3.2.2",
     "crypto-js": "^4.1.1",
     "formik": "^2.4.3",
     "fuse.js": "^6.6.2",
     "http-server": "^14.1.1",
+    "json-rpc-2.0": "^1.6.0",
     "re-resizable": "^6.9.9",
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
@@ -43,6 +47,8 @@
     "typescript": "^4.4.2",
     "uuid": "^9.0.0",
     "vitest": "^0.34.1",
+    "vscode-jsonrpc": "^8.1.0",
+    "vscode-languageserver-protocol": "^3.17.3",
     "wasm-pack": "^0.12.1",
     "web-vitals": "^2.1.0",
     "ws": "^8.13.0",
@@ -92,6 +98,7 @@
     "@babel/preset-env": "^7.22.9",
     "@tauri-apps/cli": "^1.3.1",
     "@types/crypto-js": "^4.1.1",
+    "@types/debounce": "^1.2.1",
     "@types/isomorphic-fetch": "^0.0.36",
     "@types/react-modal": "^3.16.0",
     "@types/uuid": "^9.0.1",
@@ -7,8 +7,8 @@
     "distDir": "../build"
   },
   "package": {
-    "productName": "kittycad-modeling-app",
-    "version": "0.3.0"
+    "productName": "kittycad-modeling",
+    "version": "0.4.0"
   },
   "tauri": {
     "allowlist": {
7 src-tauri/tauri.macos.conf.json (new file)

@@ -0,0 +1,7 @@
+{
+  "$schema": "../node_modules/@tauri-apps/cli/schema.json",
+  "package": {
+    "productName": "KittyCAD Modeling"
+  }
+}

7 src-tauri/tauri.windows.conf.json (new file)

@@ -0,0 +1,7 @@
+{
+  "$schema": "../node_modules/@tauri-apps/cli/schema.json",
+  "package": {
+    "productName": "KittyCAD Modeling"
+  }
+}
119 src/App.tsx

@@ -10,21 +10,23 @@ import { DebugPanel } from './components/DebugPanel'
 import { v4 as uuidv4 } from 'uuid'
 import { asyncParser } from './lang/abstractSyntaxTree'
 import { _executor } from './lang/executor'
-import CodeMirror from '@uiw/react-codemirror'
-import { langs } from '@uiw/codemirror-extensions-langs'
+import CodeMirror, { Extension } from '@uiw/react-codemirror'
 import { linter, lintGutter } from '@codemirror/lint'
-import { ViewUpdate } from '@codemirror/view'
+import { ViewUpdate, EditorView } from '@codemirror/view'
 import {
   lineHighlightField,
   addLineHighlight,
 } from './editor/highlightextension'
 import { PaneType, Selections, useStore } from './useStore'
+import Server from './editor/lsp/server'
+import Client from './editor/lsp/client'
 import { Logs, KCLErrors } from './components/Logs'
 import { CollapsiblePanel } from './components/CollapsiblePanel'
 import { MemoryPanel } from './components/MemoryPanel'
 import { useHotKeyListener } from './hooks/useHotKeyListener'
 import { Stream } from './components/Stream'
 import ModalContainer from 'react-modal-promise'
+import { FromServer, IntoServer } from './editor/lsp/codec'
 import {
   EngineCommand,
   EngineCommandManager,
@@ -50,6 +52,9 @@ import { IndexLoaderData } from './Router'
 import { toast } from 'react-hot-toast'
 import { useGlobalStateContext } from 'hooks/useGlobalStateContext'
 import { onboardingPaths } from 'routes/Onboarding'
+import { LanguageServerClient } from 'editor/lsp'
+import kclLanguage from 'editor/lsp/language'
+import { CSSRuleObject } from 'tailwindcss/types/config'

 export function App() {
   const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
@@ -74,20 +79,20 @@ export function App() {
     setArtifactMap,
     engineCommandManager,
     setEngineCommandManager,
+    highlightRange,
     setHighlightRange,
     setCursor2,
     sourceRangeMap,
     setMediaStream,
     setIsStreamReady,
     isStreamReady,
+    isLSPServerReady,
+    setIsLSPServerReady,
     isMouseDownInStream,
-    cmdId,
-    setCmdId,
     formatCode,
     openPanes,
     setOpenPanes,
     didDragInStream,
-    setDidDragInStream,
     setStreamDimensions,
     streamDimensions,
   } = useStore((s) => ({
@@ -108,6 +113,7 @@ export function App() {
     setArtifactMap: s.setArtifactNSourceRangeMaps,
     engineCommandManager: s.engineCommandManager,
     setEngineCommandManager: s.setEngineCommandManager,
+    highlightRange: s.highlightRange,
     setHighlightRange: s.setHighlightRange,
     isShiftDown: s.isShiftDown,
     setCursor: s.setCursor,
@@ -116,15 +122,14 @@ export function App() {
     setMediaStream: s.setMediaStream,
     isStreamReady: s.isStreamReady,
     setIsStreamReady: s.setIsStreamReady,
+    isLSPServerReady: s.isLSPServerReady,
+    setIsLSPServerReady: s.setIsLSPServerReady,
     isMouseDownInStream: s.isMouseDownInStream,
-    cmdId: s.cmdId,
-    setCmdId: s.setCmdId,
     formatCode: s.formatCode,
     addKCLError: s.addKCLError,
     openPanes: s.openPanes,
     setOpenPanes: s.setOpenPanes,
     didDragInStream: s.didDragInStream,
-    setDidDragInStream: s.setDidDragInStream,
     setStreamDimensions: s.setStreamDimensions,
     streamDimensions: s.streamDimensions,
   }))
@@ -134,7 +139,7 @@ export function App() {
       context: { token },
     },
     settings: {
-      context: { showDebugPanel, theme, onboardingStatus },
+      context: { showDebugPanel, theme, onboardingStatus, textWrapping },
     },
   } = useGlobalStateContext()

@@ -249,7 +254,6 @@ export function App() {
       codeBasedSelections,
     })
   }
-  const pixelDensity = window.devicePixelRatio
   const streamWidth = streamRef?.current?.offsetWidth
   const streamHeight = streamRef?.current?.offsetHeight

@@ -329,11 +333,14 @@ export function App() {
     const unSubHover = engineCommandManager.subscribeToUnreliable({
       event: 'highlight_set_entity',
       callback: ({ data }) => {
-        if (!data?.entity_id) {
-          setHighlightRange([0, 0])
-        } else {
+        if (data?.entity_id) {
           const sourceRange = sourceRangeMap[data.entity_id]
           setHighlightRange(sourceRange)
+        } else if (
+          !highlightRange ||
+          (highlightRange[0] !== 0 && highlightRange[1] !== 0)
+        ) {
+          setHighlightRange([0, 0])
         }
       },
     })
@@ -382,9 +389,6 @@ export function App() {
     nativeEvent,
   }) => {
     nativeEvent.preventDefault()
-    if (isMouseDownInStream) {
-      setDidDragInStream(true)
-    }

     const { x, y } = getNormalisedCoordinates({
       clientX,
@@ -396,9 +400,8 @@ export function App() {
     const interaction = ctrlKey ? 'zoom' : shiftKey ? 'pan' : 'rotate'

     const newCmdId = uuidv4()
-    setCmdId(newCmdId)

-    if (cmdId && isMouseDownInStream) {
+    if (isMouseDownInStream) {
       debounceSocketSend({
         type: 'modeling_cmd_req',
         cmd: {
@@ -420,15 +423,63 @@ export function App() {
       }
     }
   }

-  const extraExtensions = useMemo(() => {
-    if (TEST) return []
-    return [
-      lintGutter(),
-      linter((_view) => {
-        return kclErrToDiagnostic(useStore.getState().kclErrors)
-      }),
-    ]
-  }, [])
+  // So this is a bit weird, we need to initialize the lsp server and client.
+  // But the server happens async so we break this into two parts.
+  // Below is the client and server promise.
+  const { lspClient } = useMemo(() => {
+    const intoServer: IntoServer = new IntoServer()
+    const fromServer: FromServer = FromServer.create()
+    const client = new Client(fromServer, intoServer)
+    if (!TEST) {
+      Server.initialize(intoServer, fromServer).then((lspServer) => {
+        lspServer.start()
+        setIsLSPServerReady(true)
+      })
+    }
+
+    const lspClient = new LanguageServerClient({ client })
+    return { lspClient }
+  }, [setIsLSPServerReady])
+
+  // Here we initialize the plugin which will start the client.
+  // When we have multi-file support the name of the file will be a dep of
+  // this use memo, as well as the directory structure, which I think is
+  // a good setup becuase it will restart the client but not the server :)
+  // We do not want to restart the server, its just wasteful.
+  const kclLSP = useMemo(() => {
+    let plugin = null
+    if (isLSPServerReady && !TEST) {
+      // Set up the lsp plugin.
+      const lsp = kclLanguage({
+        // When we have more than one file, we'll need to change this.
+        documentUri: `file:///we-just-have-one-file-for-now.kcl`,
+        workspaceFolders: null,
+        client: lspClient,
+      })
+
+      plugin = lsp
+    }
+    return plugin
+  }, [lspClient, isLSPServerReady])
+
+  const editorExtensions = useMemo(() => {
+    const extensions = [lineHighlightField] as Extension[]
+
+    if (kclLSP) extensions.push(kclLSP)
+
+    // These extensions have proven to mess with vitest
+    if (!TEST) {
+      extensions.push(
+        lintGutter(),
+        linter((_view) => {
+          return kclErrToDiagnostic(useStore.getState().kclErrors)
+        })
+      )
+      if (textWrapping === 'On') extensions.push(EditorView.lineWrapping)
+    }
+
+    return extensions
+  }, [kclLSP, textWrapping])

   return (
     <div
@@ -483,15 +534,15 @@ export function App() {
                 format
               </button>
             </div>
-            <div id="code-mirror-override">
+            <div
+              id="code-mirror-override"
+              className="full-height-subtract"
+              style={{ '--height-subtract': '4.25rem' } as CSSRuleObject}
+            >
               <CodeMirror
                 className="h-full"
                 value={code}
-                extensions={[
-                  langs.javascript({ jsx: true }),
-                  lineHighlightField,
-                  ...extraExtensions,
-                ]}
+                extensions={editorExtensions}
                 onChange={onChange}
                 onUpdate={onUpdate}
                 theme={editorTheme}
@@ -1,5 +1,5 @@
 .panel {
-  @apply relative overflow-auto z-0;
+  @apply relative z-0;
   @apply bg-chalkboard-10/70 backdrop-blur-sm;
 }

@@ -196,7 +196,7 @@ const CommandBar = () => {
           setCommandBarOpen(false)
           clearState()
         }}
-        className="fixed inset-0 overflow-y-auto p-4 pt-[25vh]"
+        className="fixed inset-0 z-40 overflow-y-auto p-4 pt-[25vh]"
       >
         <Transition.Child
           enter="duration-100 ease-out"
@@ -207,7 +207,7 @@ const CommandBar = () => {
           leaveTo="opacity-0"
           as={Fragment}
         >
-          <Dialog.Overlay className="fixed z-40 inset-0 bg-chalkboard-10/70 dark:bg-chalkboard-110/50" />
+          <Dialog.Overlay className="fixed inset-0 bg-chalkboard-10/70 dark:bg-chalkboard-110/50" />
         </Transition.Child>
         <Transition.Child
           enter="duration-100 ease-out"
@@ -221,7 +221,7 @@ const CommandBar = () => {
         <Combobox
           value={selectedCommand}
           onChange={handleCommandSelection}
-          className="rounded relative mx-auto z-40 p-2 bg-chalkboard-10 dark:bg-chalkboard-100 border dark:border-chalkboard-70 max-w-xl w-full shadow-lg"
+          className="rounded relative mx-auto p-2 bg-chalkboard-10 dark:bg-chalkboard-100 border dark:border-chalkboard-70 max-w-xl w-full shadow-lg"
           as="div"
         >
           <div className="flex gap-2 items-center">
@@ -39,7 +39,7 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
   const initialValues: OutputFormat = {
     type: defaultType,
     storage: 'embedded',
-    presentation: 'compact',
+    presentation: 'pretty',
   }
   const formik = useFormik({
     initialValues,
@@ -83,8 +83,6 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
     },
   })

-  const yo = formik.values
-
   return (
     <>
       <ActionButton
@@ -12,12 +12,12 @@ import Loading from './Loading'

 export const Stream = ({ className = '' }) => {
   const [isLoading, setIsLoading] = useState(true)
+  const [clickCoords, setClickCoords] = useState<{ x: number; y: number }>()
   const videoRef = useRef<HTMLVideoElement>(null)
   const {
     mediaStream,
     engineCommandManager,
     setIsMouseDownInStream,
-    setCmdId,
     didDragInStream,
     setDidDragInStream,
     streamDimensions,
@@ -27,7 +27,6 @@ export const Stream = ({ className = '' }) => {
     isMouseDownInStream: s.isMouseDownInStream,
     setIsMouseDownInStream: s.setIsMouseDownInStream,
     fileId: s.fileId,
-    setCmdId: s.setCmdId,
     didDragInStream: s.didDragInStream,
     setDidDragInStream: s.setDidDragInStream,
     streamDimensions: s.streamDimensions,
@@ -59,7 +58,6 @@ export const Stream = ({ className = '' }) => {
     console.log('click', x, y)

     const newId = uuidv4()
-    setCmdId(newId)

     const interaction = ctrlKey ? 'pan' : 'rotate'

@@ -74,6 +72,7 @@ export const Stream = ({ className = '' }) => {
     })

     setIsMouseDownInStream(true)
+    setClickCoords({ x, y })
   }

   const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
@@ -127,6 +126,19 @@ export const Stream = ({ className = '' }) => {
       })
     }
     setDidDragInStream(false)
+    setClickCoords(undefined)
+  }
+
+  const handleMouseMove: MouseEventHandler<HTMLVideoElement> = (e) => {
+    if (!clickCoords) return
+
+    const delta =
+      ((clickCoords.x - e.clientX) ** 2 + (clickCoords.y - e.clientY) ** 2) **
+      0.5
+
+    if (delta > 5 && !didDragInStream) {
+      setDidDragInStream(true)
+    }
   }

   return (
@@ -142,6 +154,7 @@ export const Stream = ({ className = '' }) => {
       onContextMenuCapture={(e) => e.preventDefault()}
       onWheel={handleScroll}
       onPlay={() => setIsLoading(false)}
+      onMouseMoveCapture={handleMouseMove}
       className="w-full h-full"
     />
     {isLoading && (
185 src/editor/lsp/client.ts (new file)

@@ -0,0 +1,185 @@
import * as jsrpc from 'json-rpc-2.0'
import * as LSP from 'vscode-languageserver-protocol'

import {
  registerServerCapability,
  unregisterServerCapability,
} from './server-capability-registration'
import { Codec, FromServer, IntoServer } from './codec'

const client_capabilities: LSP.ClientCapabilities = {
  textDocument: {
    hover: {
      dynamicRegistration: true,
      contentFormat: ['plaintext', 'markdown'],
    },
    moniker: {},
    synchronization: {
      dynamicRegistration: true,
      willSave: false,
      didSave: false,
      willSaveWaitUntil: false,
    },
    completion: {
      dynamicRegistration: true,
      completionItem: {
        snippetSupport: false,
        commitCharactersSupport: true,
        documentationFormat: ['plaintext', 'markdown'],
        deprecatedSupport: false,
        preselectSupport: false,
      },
      contextSupport: false,
    },
    signatureHelp: {
      dynamicRegistration: true,
      signatureInformation: {
        documentationFormat: ['plaintext', 'markdown'],
      },
    },
    declaration: {
      dynamicRegistration: true,
      linkSupport: true,
    },
    definition: {
      dynamicRegistration: true,
      linkSupport: true,
    },
    typeDefinition: {
      dynamicRegistration: true,
      linkSupport: true,
    },
    implementation: {
      dynamicRegistration: true,
      linkSupport: true,
    },
  },
  workspace: {
    didChangeConfiguration: {
      dynamicRegistration: true,
    },
  },
}

export default class Client extends jsrpc.JSONRPCServerAndClient {
  afterInitializedHooks: (() => Promise<void>)[] = []
  #fromServer: FromServer
  private serverCapabilities: LSP.ServerCapabilities<any> = {}

  constructor(fromServer: FromServer, intoServer: IntoServer) {
    super(
      new jsrpc.JSONRPCServer(),
      new jsrpc.JSONRPCClient(async (json: jsrpc.JSONRPCRequest) => {
        const encoded = Codec.encode(json)
        intoServer.enqueue(encoded)
        if (null != json.id) {
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          const response = await fromServer.responses.get(json.id)!
          this.client.receive(response as jsrpc.JSONRPCResponse)
        }
      })
    )
    this.#fromServer = fromServer
  }

  async start(): Promise<void> {
    // process "window/logMessage": client <- server
    this.addMethod(LSP.LogMessageNotification.type.method, (params) => {
      const { type, message } = params as {
        type: LSP.MessageType
        message: string
      }
      let messageString = ''
      switch (type) {
        case LSP.MessageType.Error: {
          messageString += '[error] '
          break
        }
        case LSP.MessageType.Warning: {
          messageString += ' [warn] '
          break
        }
        case LSP.MessageType.Info: {
          messageString += ' [info] '
          break
        }
        case LSP.MessageType.Log: {
          messageString += ' [log] '
          break
        }
      }
      messageString += message
      // console.log(messageString)
      return
    })

    // process "client/registerCapability": client <- server
    this.addMethod(LSP.RegistrationRequest.type.method, (params) => {
      // Register a server capability.
      params.registrations.forEach(
        (capabilityRegistration: LSP.Registration) => {
          this.serverCapabilities = registerServerCapability(
            this.serverCapabilities,
            capabilityRegistration
          )
        }
      )
    })

    // process "client/unregisterCapability": client <- server
    this.addMethod(LSP.UnregistrationRequest.type.method, (params) => {
      // Unregister a server capability.
      params.unregisterations.forEach(
        (capabilityUnregistration: LSP.Unregistration) => {
          this.serverCapabilities = unregisterServerCapability(
            this.serverCapabilities,
            capabilityUnregistration
          )
        }
      )
    })

    // request "initialize": client <-> server
    const { capabilities } = await this.request(
      LSP.InitializeRequest.type.method,
      {
        processId: null,
        clientInfo: {
          name: 'kcl-language-client',
        },
        capabilities: client_capabilities,
        rootUri: null,
      } as LSP.InitializeParams
    )

    this.serverCapabilities = capabilities

    // notify "initialized": client --> server
    this.notify(LSP.InitializedNotification.type.method, {})

    await Promise.all(
      this.afterInitializedHooks.map((f: () => Promise<void>) => f())
    )
    await Promise.all([this.processNotifications(), this.processRequests()])
  }

  getServerCapabilities(): LSP.ServerCapabilities<any> {
    return this.serverCapabilities
  }

  async processNotifications(): Promise<void> {
    for await (const notification of this.#fromServer.notifications) {
      await this.receiveAndSend(notification)
    }
  }

  async processRequests(): Promise<void> {
    for await (const request of this.#fromServer.requests) {
      await this.receiveAndSend(request)
    }
  }

  pushAfterInitializeHook(...hooks: (() => Promise<void>)[]): void {
    this.afterInitializedHooks.push(...hooks)
  }
}
53 src/editor/lsp/codec.ts (new file)

@@ -0,0 +1,53 @@
import * as jsrpc from 'json-rpc-2.0'
import * as vsrpc from 'vscode-jsonrpc'

import Bytes from './codec/bytes'
import StreamDemuxer from './codec/demuxer'
import Headers from './codec/headers'
import Queue from './codec/queue'
import Tracer from './tracer'

export const encoder = new TextEncoder()
export const decoder = new TextDecoder()

export class Codec {
  static encode(
    json: jsrpc.JSONRPCRequest | jsrpc.JSONRPCResponse
  ): Uint8Array {
    const message = JSON.stringify(json)
    const delimited = Headers.add(message)
    return Bytes.encode(delimited)
  }

  static decode<T>(data: Uint8Array): T {
    const delimited = Bytes.decode(data)
    const message = Headers.remove(delimited)
    return JSON.parse(message) as T
  }
}

// FIXME: tracing effiency
export class IntoServer
  extends Queue<Uint8Array>
  implements AsyncGenerator<Uint8Array, never, void>
{
  enqueue(item: Uint8Array): void {
    Tracer.client(Headers.remove(decoder.decode(item)))
    super.enqueue(item)
  }
}

export interface FromServer extends WritableStream<Uint8Array> {
  readonly responses: {
    get(key: number | string): null | Promise<vsrpc.ResponseMessage>
  }
  readonly notifications: AsyncGenerator<vsrpc.NotificationMessage, never, void>
  readonly requests: AsyncGenerator<vsrpc.RequestMessage, never, void>
}

// eslint-disable-next-line @typescript-eslint/no-namespace
export namespace FromServer {
  export function create(): FromServer {
    return new StreamDemuxer()
  }
}
src/editor/lsp/codec/bytes.ts
Normal file
27
src/editor/lsp/codec/bytes.ts
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
import { encoder, decoder } from '../codec'
|
||||||
|
|
||||||
|
export default class Bytes {
|
||||||
|
static encode(input: string): Uint8Array {
|
||||||
|
return encoder.encode(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
static decode(input: Uint8Array): string {
|
||||||
|
return decoder.decode(input)
|
||||||
|
}
|
||||||
|
|
||||||
|
static append<
|
||||||
|
T extends { length: number; set(arr: T, offset: number): void }
|
||||||
|
>(constructor: { new (length: number): T }, ...arrays: T[]) {
|
||||||
|
let totalLength = 0
|
||||||
|
for (const arr of arrays) {
|
||||||
|
totalLength += arr.length
|
||||||
|
}
|
||||||
|
const result = new constructor(totalLength)
|
||||||
|
let offset = 0
|
||||||
|
for (const arr of arrays) {
|
||||||
|
result.set(arr, offset)
|
||||||
|
offset += arr.length
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
}
|
82 src/editor/lsp/codec/demuxer.ts (new file)

@@ -0,0 +1,82 @@
import * as vsrpc from 'vscode-jsonrpc'

import Bytes from './bytes'
import PromiseMap from './map'
import Queue from './queue'
import Tracer from '../tracer'

export default class StreamDemuxer extends Queue<Uint8Array> {
  readonly responses: PromiseMap<number | string, vsrpc.ResponseMessage> =
    new PromiseMap()
  readonly notifications: Queue<vsrpc.NotificationMessage> =
    new Queue<vsrpc.NotificationMessage>()
  readonly requests: Queue<vsrpc.RequestMessage> =
    new Queue<vsrpc.RequestMessage>()

  readonly #start: Promise<void>

  constructor() {
    super()
    this.#start = this.start()
  }

  private async start(): Promise<void> {
    let contentLength: null | number = null
    let buffer = new Uint8Array()

    for await (const bytes of this) {
      buffer = Bytes.append(Uint8Array, buffer, bytes)
      while (buffer.length > 0) {
        // check if the content length is known
        if (null == contentLength) {
          // if not, try to match the prefixed headers
          const match = Bytes.decode(buffer).match(
            /^Content-Length:\s*(\d+)\s*/
          )
          if (null == match) continue

          // try to parse the content-length from the headers
          const length = parseInt(match[1])
          if (isNaN(length)) throw new Error('invalid content length')

          // slice the headers since we now have the content length
          buffer = buffer.slice(match[0].length)

          // set the content length
          contentLength = length
        }

        // if the buffer doesn't contain a full message; await another iteration
        if (buffer.length < contentLength) continue

        // Get just the slice of the buffer that is our content length.
        const slice = buffer.slice(0, contentLength)

        // decode buffer to a string
        const delimited = Bytes.decode(slice)

        // reset the buffer
        buffer = buffer.slice(contentLength)
        // reset the contentLength
        contentLength = null

        const message = JSON.parse(delimited) as vsrpc.Message
        Tracer.server(message)

        // demux the message stream
        if (vsrpc.Message.isResponse(message) && null != message.id) {
          this.responses.set(message.id, message)
          continue
        }
        if (vsrpc.Message.isNotification(message)) {
          this.notifications.enqueue(message)
          continue
        }
        if (vsrpc.Message.isRequest(message)) {
          this.requests.enqueue(message)
          continue
        }
      }
    }
  }
}
9 src/editor/lsp/codec/headers.ts (new file)

@@ -0,0 +1,9 @@
export default class Headers {
  static add(message: string): string {
    return `Content-Length: ${message.length}\r\n\r\n${message}`
  }

  static remove(delimited: string): string {
    return delimited.replace(/^Content-Length:\s*\d+\s*/, '')
  }
}
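Headers implements the LSP base-protocol framing: every payload is prefixed with a Content-Length header followed by a blank line. A small sketch of what `add` and `remove` do (the payload contents are arbitrary):

```ts
import Headers from './codec/headers'

const payload = '{"jsonrpc":"2.0","id":1,"method":"shutdown"}'

// 'Content-Length: 44\r\n\r\n{"jsonrpc":"2.0","id":1,"method":"shutdown"}'
const framed = Headers.add(payload)

// Stripping the header gives the original payload back.
console.log(Headers.remove(framed) === payload) // true
```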
72 src/editor/lsp/codec/map.ts (new file)

@@ -0,0 +1,72 @@
export default class PromiseMap<K, V extends { toString(): string }> {
  #map: Map<K, PromiseMap.Entry<V>> = new Map()

  get(key: K & { toString(): string }): null | Promise<V> {
    let initialized: PromiseMap.Entry<V>
    // if the entry doesn't exist, set it
    if (!this.#map.has(key)) {
      initialized = this.#set(key)
    } else {
      // otherwise return the entry
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      initialized = this.#map.get(key)!
    }
    // if the entry is a pending promise, return it
    if (initialized.status === 'pending') {
      return initialized.promise
    } else {
      // otherwise return null
      return null
    }
  }

  #set(key: K, value?: V): PromiseMap.Entry<V> {
    if (this.#map.has(key)) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      return this.#map.get(key)!
    }
    // placeholder resolver for entry
    let resolve = (item: V) => {
      void item
    }
    // promise for entry (which assigns the resolver
    const promise = new Promise<V>((resolver) => {
      resolve = resolver
    })
    // the initialized entry
    const initialized: PromiseMap.Entry<V> = {
      status: 'pending',
      resolve,
      promise,
    }
    if (null != value) {
      initialized.resolve(value)
    }
    // set the entry
    this.#map.set(key, initialized)
    return initialized
  }

  set(key: K & { toString(): string }, value: V): this {
    const initialized = this.#set(key, value)
    // if the promise is pending ...
    if (initialized.status === 'pending') {
      // ... set the entry status to resolved to free the promise
      this.#map.set(key, { status: 'resolved' })
      // ... and resolve the promise with the given value
      initialized.resolve(value)
    }
    return this
  }

  get size(): number {
    return this.#map.size
  }
}

// eslint-disable-next-line @typescript-eslint/no-namespace
export namespace PromiseMap {
  export type Entry<V> =
    | { status: 'pending'; resolve: (item: V) => void; promise: Promise<V> }
    | { status: 'resolved' }
}
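PromiseMap is what lets the client await a response by request id before the demuxer has actually seen it: `get` installs a pending promise that a later `set` resolves. A minimal ordering sketch (the id and payload are arbitrary):

```ts
import PromiseMap from './codec/map'

const responses = new PromiseMap<number, { id: number; result: string }>()

// Awaiting the response for request 1 before it exists creates a pending entry...
const pending = responses.get(1)

// ...which the later set() resolves.
responses.set(1, { id: 1, result: 'ok' })

pending?.then((msg) => console.log(msg.result)) // "ok"
```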
113 src/editor/lsp/codec/queue.ts (new file)

@@ -0,0 +1,113 @@
export default class Queue<T>
  implements WritableStream<T>, AsyncGenerator<T, never, void>
{
  readonly #promises: Promise<T>[] = []
  readonly #resolvers: ((item: T) => void)[] = []
  readonly #observers: ((item: T) => void)[] = []

  #closed = false
  #locked = false
  readonly #stream: WritableStream<T>

  static #__add<X>(
    promises: Promise<X>[],
    resolvers: ((item: X) => void)[]
  ): void {
    promises.push(
      new Promise((resolve) => {
        resolvers.push(resolve)
      })
    )
  }

  static #__enqueue<X>(
    closed: boolean,
    promises: Promise<X>[],
    resolvers: ((item: X) => void)[],
    item: X
  ): void {
    if (!closed) {
      if (!resolvers.length) Queue.#__add(promises, resolvers)
      const resolve = resolvers.shift()! // eslint-disable-line @typescript-eslint/no-non-null-assertion
      resolve(item)
    }
  }

  constructor() {
    const closed = this.#closed
    const promises = this.#promises
    const resolvers = this.#resolvers
    this.#stream = new WritableStream({
      write(item: T): void {
        Queue.#__enqueue(closed, promises, resolvers, item)
      },
    })
  }

  #add(): void {
    return Queue.#__add(this.#promises, this.#resolvers)
  }

  enqueue(item: T): void {
    return Queue.#__enqueue(this.#closed, this.#promises, this.#resolvers, item)
  }

  dequeue(): Promise<T> {
    if (!this.#promises.length) this.#add()
    const item = this.#promises.shift()! // eslint-disable-line @typescript-eslint/no-non-null-assertion
    return item
  }

  isEmpty(): boolean {
    return !this.#promises.length
  }

  isBlocked(): boolean {
    return !!this.#resolvers.length
  }

  get length(): number {
    return this.#promises.length - this.#resolvers.length
  }

  async next(): Promise<IteratorResult<T, never>> {
    const done = false
    const value = await this.dequeue()
    for (const observer of this.#observers) {
      observer(value)
    }
    return { done, value }
  }

  return(): Promise<IteratorResult<T, never>> {
    return new Promise(() => {
      // empty
    })
  }

  throw(err: Error): Promise<IteratorResult<T, never>> {
    return new Promise((_resolve, reject) => {
      reject(err)
    })
  }

  [Symbol.asyncIterator](): AsyncGenerator<T, never, void> {
    return this
  }

  get locked(): boolean {
    return this.#stream.locked
  }

  abort(reason?: Error): Promise<void> {
    return this.#stream.abort(reason)
  }

  close(): Promise<void> {
    return this.#stream.close()
  }

  getWriter(): WritableStreamDefaultWriter<T> {
    return this.#stream.getWriter()
  }
}
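Queue is both a WritableStream sink and an async iterator, which is how IntoServer and StreamDemuxer hand messages between the two sides as they arrive. A small usage sketch with plain strings:

```ts
import Queue from './codec/queue'

const queue = new Queue<string>()

// Producer side: push items as they show up.
queue.enqueue('first')
queue.enqueue('second')

// Consumer side: dequeue resolves in FIFO order, awaiting if nothing is queued yet.
async function consume() {
  console.log(await queue.dequeue()) // "first"
  console.log(await queue.dequeue()) // "second"
}
consume()
```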
151 src/editor/lsp/index.ts (new file)

@@ -0,0 +1,151 @@
import type * as LSP from 'vscode-languageserver-protocol'
import Client from './client'
import { LanguageServerPlugin } from './plugin'
import { SemanticToken, deserializeTokens } from './semantic_tokens'

// https://microsoft.github.io/language-server-protocol/specifications/specification-current/

// Client to server then server to client
interface LSPRequestMap {
  initialize: [LSP.InitializeParams, LSP.InitializeResult]
  'textDocument/hover': [LSP.HoverParams, LSP.Hover]
  'textDocument/completion': [
    LSP.CompletionParams,
    LSP.CompletionItem[] | LSP.CompletionList | null
  ]
  'textDocument/semanticTokens/full': [
    LSP.SemanticTokensParams,
    LSP.SemanticTokens
  ]
}

// Client to server
interface LSPNotifyMap {
  initialized: LSP.InitializedParams
  'textDocument/didChange': LSP.DidChangeTextDocumentParams
  'textDocument/didOpen': LSP.DidOpenTextDocumentParams
}

// Server to client
interface LSPEventMap {
  'textDocument/publishDiagnostics': LSP.PublishDiagnosticsParams
}

export type Notification = {
  [key in keyof LSPEventMap]: {
    jsonrpc: '2.0'
    id?: null | undefined
    method: key
    params: LSPEventMap[key]
  }
}[keyof LSPEventMap]

export interface LanguageServerClientOptions {
  client: Client
}

export class LanguageServerClient {
  private client: Client

  public ready: boolean

  private plugins: LanguageServerPlugin[]

  public initializePromise: Promise<void>

  private isUpdatingSemanticTokens: boolean = false
  private semanticTokens: SemanticToken[] = []

  constructor(options: LanguageServerClientOptions) {
    this.plugins = []
    this.client = options.client

    this.ready = false

    this.initializePromise = this.initialize()
  }

  async initialize() {
    // Start the client in the background.
    this.client.start()

    this.ready = true
  }

  getServerCapabilities(): LSP.ServerCapabilities<any> {
    return this.client.getServerCapabilities()
  }

  close() {}

  textDocumentDidOpen(params: LSP.DidOpenTextDocumentParams) {
    this.notify('textDocument/didOpen', params)

    this.updateSemanticTokens(params.textDocument.uri)
  }

  textDocumentDidChange(params: LSP.DidChangeTextDocumentParams) {
    this.notify('textDocument/didChange', params)
    this.updateSemanticTokens(params.textDocument.uri)
  }

  async updateSemanticTokens(uri: string) {
    // Make sure we can only run, if we aren't already running.
    if (!this.isUpdatingSemanticTokens) {
      this.isUpdatingSemanticTokens = true

      const result = await this.request('textDocument/semanticTokens/full', {
        textDocument: {
          uri,
        },
      })

      this.semanticTokens = deserializeTokens(
        result.data,
        this.getServerCapabilities().semanticTokensProvider
      )

      this.isUpdatingSemanticTokens = false
    }
  }

  getSemanticTokens(): SemanticToken[] {
    return this.semanticTokens
  }

  async textDocumentHover(params: LSP.HoverParams) {
    return await this.request('textDocument/hover', params)
  }

  async textDocumentCompletion(params: LSP.CompletionParams) {
    return await this.request('textDocument/completion', params)
  }

  attachPlugin(plugin: LanguageServerPlugin) {
    this.plugins.push(plugin)
  }

  detachPlugin(plugin: LanguageServerPlugin) {
    const i = this.plugins.indexOf(plugin)
    if (i === -1) return
    this.plugins.splice(i, 1)
  }

  private request<K extends keyof LSPRequestMap>(
    method: K,
    params: LSPRequestMap[K][0]
  ): Promise<LSPRequestMap[K][1]> {
    return this.client.request(method, params) as Promise<LSPRequestMap[K][1]>
  }

  private notify<K extends keyof LSPNotifyMap>(
    method: K,
    params: LSPNotifyMap[K]
  ): void {
    return this.client.notify(method, params)
  }

  private processNotification(notification: Notification) {
    for (const plugin of this.plugins) plugin.processNotification(notification)
  }
}
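LanguageServerClient wraps the raw JSON-RPC client with typed LSP requests and caches semantic tokens. In the app it is constructed once in App.tsx and handed to kclLanguage; a standalone sketch of that wiring, using the same single-file document URI App.tsx uses, looks roughly like this:

```ts
import Client from './client'
import { LanguageServerClient } from '.'
import { FromServer, IntoServer } from './codec'

const intoServer = new IntoServer()
const fromServer = FromServer.create()
const lspClient = new LanguageServerClient({
  client: new Client(fromServer, intoServer),
})

// Telling the server a document was opened also refreshes semantic tokens.
lspClient.textDocumentDidOpen({
  textDocument: {
    uri: 'file:///we-just-have-one-file-for-now.kcl',
    languageId: 'kcl',
    version: 1,
    text: '',
  },
})
```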
36 src/editor/lsp/language.ts (new file)

@@ -0,0 +1,36 @@
// Code mirror language implementation for kcl.

import {
  Language,
  defineLanguageFacet,
  LanguageSupport,
} from '@codemirror/language'
import { LanguageServerClient } from '.'
import { kclPlugin } from './plugin'
import type * as LSP from 'vscode-languageserver-protocol'
import { parser as jsParser } from '@lezer/javascript'

const data = defineLanguageFacet({})

export interface LanguageOptions {
  workspaceFolders: LSP.WorkspaceFolder[] | null
  documentUri: string
  client: LanguageServerClient
}

export default function kclLanguage(options: LanguageOptions): LanguageSupport {
  // For now let's use the javascript parser.
  // It works really well and has good syntax highlighting.
  // We can use our lsp for the rest.
  const lang = new Language(data, jsParser, [], 'kcl')

  // Create our supporting extension.
  const kclLsp = kclPlugin({
    documentUri: options.documentUri,
    workspaceFolders: options.workspaceFolders,
    allowHTMLContent: true,
    client: options.client,
  })

  return new LanguageSupport(lang, [kclLsp])
}
168
src/editor/lsp/parser.ts
Normal file
168
src/editor/lsp/parser.ts
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
// Extends the codemirror Parser for kcl.
|
||||||
|
|
||||||
|
import {
|
||||||
|
Parser,
|
||||||
|
Input,
|
||||||
|
TreeFragment,
|
||||||
|
PartialParse,
|
||||||
|
Tree,
|
||||||
|
NodeType,
|
||||||
|
NodeSet,
|
||||||
|
} from '@lezer/common'
|
||||||
|
import { LanguageServerClient } from '.'
|
||||||
|
import { posToOffset } from './plugin'
|
||||||
|
import { SemanticToken } from './semantic_tokens'
|
||||||
|
import { DocInput } from '@codemirror/language'
|
||||||
|
import { tags, styleTags } from '@lezer/highlight'
|
||||||
|
|
||||||
|
export default class KclParser extends Parser {
|
||||||
|
private client: LanguageServerClient
|
||||||
|
|
||||||
|
constructor(client: LanguageServerClient) {
|
||||||
|
super()
|
||||||
|
this.client = client
|
||||||
|
}
|
||||||
|
|
||||||
|
createParse(
|
||||||
|
input: Input,
|
||||||
|
fragments: readonly TreeFragment[],
|
||||||
|
ranges: readonly { from: number; to: number }[]
|
||||||
|
): PartialParse {
|
||||||
|
let parse: PartialParse = new Context(this, input, fragments, ranges)
|
||||||
|
return parse
|
||||||
|
}
|
||||||
|
|
||||||
|
getTokenTypes(): string[] {
|
||||||
|
return this.client.getServerCapabilities().semanticTokensProvider!.legend
|
||||||
|
.tokenTypes
|
||||||
|
}
|
||||||
|
|
||||||
|
getSemanticTokens(): SemanticToken[] {
|
||||||
|
return this.client.getSemanticTokens()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class Context implements PartialParse {
|
||||||
|
private parser: KclParser
|
||||||
|
private input: DocInput
|
||||||
|
private fragments: readonly TreeFragment[]
|
||||||
|
private ranges: readonly { from: number; to: number }[]
|
||||||
|
|
||||||
|
private nodeTypes: { [key: string]: NodeType }
|
||||||
|
stoppedAt: number = 0
|
||||||
|
|
||||||
|
private semanticTokens: SemanticToken[] = []
|
||||||
|
private currentLine: number = 0
|
||||||
|
private currentColumn: number = 0
|
||||||
|
private nodeSet: NodeSet
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
/// The parser configuration used.
|
||||||
|
parser: KclParser,
|
||||||
|
input: Input,
|
||||||
|
fragments: readonly TreeFragment[],
|
||||||
|
ranges: readonly { from: number; to: number }[]
|
||||||
|
) {
|
||||||
|
this.parser = parser
|
||||||
|
this.input = input as DocInput
|
||||||
|
this.fragments = fragments
|
||||||
|
this.ranges = ranges
|
||||||
|
|
||||||
|
// Iterate over the semantic token types and create a node type for each.
|
||||||
|
this.nodeTypes = {}
|
||||||
|
let nodeArray: NodeType[] = []
|
||||||
|
this.parser.getTokenTypes().forEach((tokenType, index) => {
|
||||||
|
const nodeType = NodeType.define({
|
||||||
|
id: index,
|
||||||
|
name: tokenType,
|
||||||
|
// props: [this.styleTags],
|
||||||
|
})
|
||||||
|
this.nodeTypes[tokenType] = nodeType
|
||||||
|
nodeArray.push(nodeType)
|
||||||
|
})
|
||||||
|
|
||||||
|
this.semanticTokens = this.parser.getSemanticTokens()
|
||||||
|
const styles = styleTags({
|
||||||
|
number: tags.number,
|
||||||
|
variable: tags.variableName,
|
||||||
|
operator: tags.operator,
|
||||||
|
keyword: tags.keyword,
|
||||||
|
string: tags.string,
|
||||||
|
comment: tags.comment,
|
||||||
|
function: tags.function(tags.variableName),
|
||||||
|
})
|
||||||
|
this.nodeSet = new NodeSet(nodeArray).extend(styles)
|
||||||
|
}
|
||||||
|
|
||||||
|
get parsedPos(): number {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
advance(): Tree | null {
|
||||||
|
if (this.semanticTokens.length === 0) {
|
||||||
|
return new Tree(NodeType.none, [], [], 0)
|
||||||
|
}
|
||||||
|
const tree = this.createTree(this.semanticTokens[0], 0)
|
||||||
|
this.stoppedAt = this.input.doc.length
|
||||||
|
return tree
|
||||||
|
}
|
||||||
|
|
||||||
|
createTree(token: SemanticToken, index: number): Tree {
|
||||||
|
const changedLine = token.delta_line !== 0
|
||||||
|
this.currentLine += token.delta_line
|
||||||
|
if (changedLine) {
|
||||||
|
this.currentColumn = 0
|
||||||
|
}
|
||||||
|
this.currentColumn += token.delta_start
|
||||||
|
|
||||||
|
// Let's get our position relative to the start of the file.
|
||||||
|
let currentPosition = posToOffset(this.input.doc, {
|
||||||
|
line: this.currentLine,
|
||||||
|
character: this.currentColumn,
|
||||||
|
})
|
||||||
|
|
||||||
|
const nodeType = this.nodeSet.types[this.nodeTypes[token.token_type].id]
|
||||||
|
|
||||||
|
if (currentPosition === undefined) {
|
||||||
|
// This is bad and weird.
|
||||||
|
return new Tree(nodeType, [], [], token.length)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (index >= this.semanticTokens.length - 1) {
|
||||||
|
// We have no children.
|
||||||
|
return new Tree(nodeType, [], [], token.length)
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextIndex = index + 1
|
||||||
|
const nextToken = this.semanticTokens[nextIndex]
|
||||||
|
const changedLineNext = nextToken.delta_line !== 0
|
||||||
|
const nextLine = this.currentLine + nextToken.delta_line
|
||||||
|
const nextColumn = changedLineNext
|
||||||
|
? nextToken.delta_start
|
||||||
|
: this.currentColumn + nextToken.delta_start
|
||||||
|
const nextPosition = posToOffset(this.input.doc, {
|
||||||
|
line: nextLine,
|
||||||
|
character: nextColumn,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (nextPosition === undefined) {
|
||||||
|
// This is bad and weird.
|
||||||
|
return new Tree(nodeType, [], [], token.length)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Let's get the
|
||||||
|
|
||||||
|
return new Tree(
|
||||||
|
nodeType,
|
||||||
|
[this.createTree(nextToken, nextIndex)],
|
||||||
|
|
||||||
|
// The positions (offsets relative to the start of this tree) of the children.
|
||||||
|
[nextPosition - currentPosition],
|
||||||
|
token.length
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
stopAt(pos: number) {
|
||||||
|
this.stoppedAt = pos
|
||||||
|
}
|
||||||
|
}
|
360
src/editor/lsp/plugin.ts
Normal file
360
src/editor/lsp/plugin.ts
Normal file
@ -0,0 +1,360 @@
|
|||||||
|
import { autocompletion, completeFromList } from '@codemirror/autocomplete'
|
||||||
|
import { setDiagnostics } from '@codemirror/lint'
|
||||||
|
import { Facet } from '@codemirror/state'
|
||||||
|
import {
|
||||||
|
EditorView,
|
||||||
|
ViewPlugin,
|
||||||
|
Tooltip,
|
||||||
|
hoverTooltip,
|
||||||
|
tooltips,
|
||||||
|
} from '@codemirror/view'
|
||||||
|
import {
|
||||||
|
DiagnosticSeverity,
|
||||||
|
CompletionItemKind,
|
||||||
|
CompletionTriggerKind,
|
||||||
|
} from 'vscode-languageserver-protocol'
|
||||||
|
|
||||||
|
import type {
|
||||||
|
Completion,
|
||||||
|
CompletionContext,
|
||||||
|
CompletionResult,
|
||||||
|
} from '@codemirror/autocomplete'
|
||||||
|
import type { PublishDiagnosticsParams } from 'vscode-languageserver-protocol'
|
||||||
|
import type { ViewUpdate, PluginValue } from '@codemirror/view'
|
||||||
|
import type { Text } from '@codemirror/state'
|
||||||
|
import type * as LSP from 'vscode-languageserver-protocol'
|
||||||
|
import { LanguageServerClient, Notification } from '.'
|
||||||
|
import { Marked } from '@ts-stack/markdown'
|
||||||
|
|
||||||
|
const changesDelay = 500
|
||||||
|
|
||||||
|
const CompletionItemKindMap = Object.fromEntries(
|
||||||
|
Object.entries(CompletionItemKind).map(([key, value]) => [value, key])
|
||||||
|
) as Record<CompletionItemKind, string>
|
||||||
|
|
||||||
|
const useLast = (values: readonly any[]) => values.reduce((_, v) => v, '')
|
||||||
|
const documentUri = Facet.define<string, string>({ combine: useLast })
|
||||||
|
const languageId = Facet.define<string, string>({ combine: useLast })
|
||||||
|
const client = Facet.define<LanguageServerClient, LanguageServerClient>({
|
||||||
|
combine: useLast,
|
||||||
|
})
|
||||||
|
|
||||||
|
export interface LanguageServerOptions {
|
||||||
|
workspaceFolders: LSP.WorkspaceFolder[] | null
|
||||||
|
documentUri: string
|
||||||
|
allowHTMLContent: boolean
|
||||||
|
client: LanguageServerClient
|
||||||
|
}
|
||||||
|
|
||||||
|
export class LanguageServerPlugin implements PluginValue {
|
||||||
|
public client: LanguageServerClient
|
||||||
|
|
||||||
|
private documentUri: string
|
||||||
|
private languageId: string
|
||||||
|
private documentVersion: number
|
||||||
|
|
||||||
|
private changesTimeout: number
|
||||||
|
|
||||||
|
constructor(private view: EditorView, private allowHTMLContent: boolean) {
|
||||||
|
this.client = this.view.state.facet(client)
|
||||||
|
this.documentUri = this.view.state.facet(documentUri)
|
||||||
|
this.languageId = this.view.state.facet(languageId)
|
||||||
|
this.documentVersion = 0
|
||||||
|
this.changesTimeout = 0
|
||||||
|
|
||||||
|
this.client.attachPlugin(this)
|
||||||
|
|
||||||
|
this.initialize({
|
||||||
|
documentText: this.view.state.doc.toString(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
update({ docChanged }: ViewUpdate) {
|
||||||
|
if (!docChanged) return
|
||||||
|
if (this.changesTimeout) clearTimeout(this.changesTimeout)
|
||||||
|
this.changesTimeout = window.setTimeout(() => {
|
||||||
|
this.sendChange({
|
||||||
|
documentText: this.view.state.doc.toString(),
|
||||||
|
})
|
||||||
|
}, changesDelay)
|
||||||
|
}
|
||||||
|
|
||||||
|
destroy() {
|
||||||
|
this.client.detachPlugin(this)
|
||||||
|
}
|
||||||
|
|
||||||
|
async initialize({ documentText }: { documentText: string }) {
|
||||||
|
if (this.client.initializePromise) {
|
||||||
|
await this.client.initializePromise
|
||||||
|
}
|
||||||
|
this.client.textDocumentDidOpen({
|
||||||
|
textDocument: {
|
||||||
|
uri: this.documentUri,
|
||||||
|
languageId: this.languageId,
|
||||||
|
text: documentText,
|
||||||
|
version: this.documentVersion,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
async sendChange({ documentText }: { documentText: string }) {
|
||||||
|
if (!this.client.ready) return
|
||||||
|
try {
|
||||||
|
await this.client.textDocumentDidChange({
|
||||||
|
textDocument: {
|
||||||
|
uri: this.documentUri,
|
||||||
|
version: this.documentVersion++,
|
||||||
|
},
|
||||||
|
contentChanges: [{ text: documentText }],
|
||||||
|
})
|
||||||
|
} catch (e) {
|
||||||
|
console.error(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
requestDiagnostics(view: EditorView) {
|
||||||
|
this.sendChange({ documentText: view.state.doc.toString() })
|
||||||
|
}
|
||||||
|
|
||||||
|
async requestHoverTooltip(
|
||||||
|
view: EditorView,
|
||||||
|
{ line, character }: { line: number; character: number }
|
||||||
|
): Promise<Tooltip | null> {
|
||||||
|
if (
|
||||||
|
!this.client.ready ||
|
||||||
|
!this.client.getServerCapabilities().hoverProvider
|
||||||
|
)
|
||||||
|
return null
|
||||||
|
|
||||||
|
this.sendChange({ documentText: view.state.doc.toString() })
|
||||||
|
const result = await this.client.textDocumentHover({
|
||||||
|
textDocument: { uri: this.documentUri },
|
||||||
|
position: { line, character },
|
||||||
|
})
|
||||||
|
if (!result) return null
|
||||||
|
const { contents, range } = result
|
||||||
|
let pos = posToOffset(view.state.doc, { line, character })!
|
||||||
|
let end: number | undefined
|
||||||
|
if (range) {
|
||||||
|
pos = posToOffset(view.state.doc, range.start)!
|
||||||
|
end = posToOffset(view.state.doc, range.end)
|
||||||
|
}
|
||||||
|
if (pos === null) return null
|
||||||
|
const dom = document.createElement('div')
|
||||||
|
dom.classList.add('documentation')
|
||||||
|
if (this.allowHTMLContent) dom.innerHTML = formatContents(contents)
|
||||||
|
else dom.textContent = formatContents(contents)
|
||||||
|
return { pos, end, create: (view) => ({ dom }), above: true }
|
||||||
|
}
|
||||||
|
|
||||||
|
async requestCompletion(
|
||||||
|
context: CompletionContext,
|
||||||
|
{ line, character }: { line: number; character: number },
|
||||||
|
{
|
||||||
|
triggerKind,
|
||||||
|
triggerCharacter,
|
||||||
|
}: {
|
||||||
|
triggerKind: CompletionTriggerKind
|
||||||
|
triggerCharacter: string | undefined
|
||||||
|
}
|
||||||
|
): Promise<CompletionResult | null> {
|
||||||
|
if (
|
||||||
|
!this.client.ready ||
|
||||||
|
!this.client.getServerCapabilities().completionProvider
|
||||||
|
)
|
||||||
|
return null
|
||||||
|
|
||||||
|
this.sendChange({
|
||||||
|
documentText: context.state.doc.toString(),
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await this.client.textDocumentCompletion({
|
||||||
|
textDocument: { uri: this.documentUri },
|
||||||
|
position: { line, character },
|
||||||
|
context: {
|
||||||
|
triggerKind,
|
||||||
|
triggerCharacter,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!result) return null
|
||||||
|
|
||||||
|
const items = 'items' in result ? result.items : result
|
||||||
|
|
||||||
|
let options = items.map(
|
||||||
|
({
|
||||||
|
detail,
|
||||||
|
label,
|
||||||
|
labelDetails,
|
||||||
|
kind,
|
||||||
|
textEdit,
|
||||||
|
documentation,
|
||||||
|
deprecated,
|
||||||
|
insertText,
|
||||||
|
insertTextFormat,
|
||||||
|
sortText,
|
||||||
|
filterText,
|
||||||
|
}) => {
|
||||||
|
const completion: Completion & {
|
||||||
|
filterText: string
|
||||||
|
sortText?: string
|
||||||
|
apply: string
|
||||||
|
} = {
|
||||||
|
label,
|
||||||
|
detail: labelDetails ? labelDetails.detail : detail,
|
||||||
|
apply: label,
|
||||||
|
type: kind && CompletionItemKindMap[kind].toLowerCase(),
|
||||||
|
sortText: sortText ?? label,
|
||||||
|
filterText: filterText ?? label,
|
||||||
|
}
|
||||||
|
if (documentation) {
|
||||||
|
completion.info = () => {
|
||||||
|
const htmlString = formatContents(documentation)
|
||||||
|
const htmlNode = document.createElement('div')
|
||||||
|
htmlNode.style.display = 'contents'
|
||||||
|
htmlNode.innerHTML = htmlString
|
||||||
|
return { dom: htmlNode }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return completion
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
return completeFromList(options)(context)
|
||||||
|
}
|
||||||
|
|
||||||
|
processNotification(notification: Notification) {
|
||||||
|
try {
|
||||||
|
switch (notification.method) {
|
||||||
|
case 'textDocument/publishDiagnostics':
|
||||||
|
this.processDiagnostics(notification.params)
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
processDiagnostics(params: PublishDiagnosticsParams) {
|
||||||
|
if (params.uri !== this.documentUri) return
|
||||||
|
|
||||||
|
const diagnostics = params.diagnostics
|
||||||
|
.map(({ range, message, severity }) => ({
|
||||||
|
from: posToOffset(this.view.state.doc, range.start)!,
|
||||||
|
to: posToOffset(this.view.state.doc, range.end)!,
|
||||||
|
severity: (
|
||||||
|
{
|
||||||
|
[DiagnosticSeverity.Error]: 'error',
|
||||||
|
[DiagnosticSeverity.Warning]: 'warning',
|
||||||
|
[DiagnosticSeverity.Information]: 'info',
|
||||||
|
[DiagnosticSeverity.Hint]: 'info',
|
||||||
|
} as const
|
||||||
|
)[severity!],
|
||||||
|
message,
|
||||||
|
}))
|
||||||
|
.filter(
|
||||||
|
({ from, to }) =>
|
||||||
|
from !== null && to !== null && from !== undefined && to !== undefined
|
||||||
|
)
|
||||||
|
.sort((a, b) => {
|
||||||
|
switch (true) {
|
||||||
|
case a.from < b.from:
|
||||||
|
return -1
|
||||||
|
case a.from > b.from:
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
})
|
||||||
|
|
||||||
|
this.view.dispatch(setDiagnostics(this.view.state, diagnostics))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function kclPlugin(options: LanguageServerOptions) {
|
||||||
|
let plugin: LanguageServerPlugin | null = null
|
||||||
|
|
||||||
|
return [
|
||||||
|
client.of(options.client),
|
||||||
|
documentUri.of(options.documentUri),
|
||||||
|
languageId.of('kcl'),
|
||||||
|
ViewPlugin.define(
|
||||||
|
(view) =>
|
||||||
|
(plugin = new LanguageServerPlugin(view, options.allowHTMLContent))
|
||||||
|
),
|
||||||
|
hoverTooltip(
|
||||||
|
(view, pos) =>
|
||||||
|
plugin?.requestHoverTooltip(view, offsetToPos(view.state.doc, pos)) ??
|
||||||
|
null
|
||||||
|
),
|
||||||
|
tooltips({
|
||||||
|
position: 'absolute',
|
||||||
|
}),
|
||||||
|
autocompletion({
|
||||||
|
override: [
|
||||||
|
async (context) => {
|
||||||
|
if (plugin == null) return null
|
||||||
|
|
||||||
|
const { state, pos, explicit } = context
|
||||||
|
const line = state.doc.lineAt(pos)
|
||||||
|
let trigKind: CompletionTriggerKind = CompletionTriggerKind.Invoked
|
||||||
|
let trigChar: string | undefined
|
||||||
|
if (
|
||||||
|
!explicit &&
|
||||||
|
plugin.client
|
||||||
|
.getServerCapabilities()
|
||||||
|
.completionProvider?.triggerCharacters?.includes(
|
||||||
|
line.text[pos - line.from - 1]
|
||||||
|
)
|
||||||
|
) {
|
||||||
|
trigKind = CompletionTriggerKind.TriggerCharacter
|
||||||
|
trigChar = line.text[pos - line.from - 1]
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
trigKind === CompletionTriggerKind.Invoked &&
|
||||||
|
!context.matchBefore(/\w+$/)
|
||||||
|
) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return await plugin.requestCompletion(
|
||||||
|
context,
|
||||||
|
offsetToPos(state.doc, pos),
|
||||||
|
{
|
||||||
|
triggerKind: trigKind,
|
||||||
|
triggerCharacter: trigChar,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
export function posToOffset(
|
||||||
|
doc: Text,
|
||||||
|
pos: { line: number; character: number }
|
||||||
|
): number | undefined {
|
||||||
|
if (pos.line >= doc.lines) return
|
||||||
|
const offset = doc.line(pos.line + 1).from + pos.character
|
||||||
|
if (offset > doc.length) return
|
||||||
|
return offset
|
||||||
|
}
|
||||||
|
|
||||||
|
function offsetToPos(doc: Text, offset: number) {
|
||||||
|
const line = doc.lineAt(offset)
|
||||||
|
return {
|
||||||
|
line: line.number - 1,
|
||||||
|
character: offset - line.from,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatContents(
|
||||||
|
contents: LSP.MarkupContent | LSP.MarkedString | LSP.MarkedString[]
|
||||||
|
): string {
|
||||||
|
if (Array.isArray(contents)) {
|
||||||
|
return contents.map((c) => formatContents(c) + '\n\n').join('')
|
||||||
|
} else if (typeof contents === 'string') {
|
||||||
|
return Marked.parse(contents)
|
||||||
|
} else {
|
||||||
|
return Marked.parse(contents.value)
|
||||||
|
}
|
||||||
|
}
|
51
src/editor/lsp/semantic_tokens.ts
Normal file
51
src/editor/lsp/semantic_tokens.ts
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
import type * as LSP from 'vscode-languageserver-protocol'
|
||||||
|
|
||||||
|
export class SemanticToken {
|
||||||
|
delta_line: number
|
||||||
|
delta_start: number
|
||||||
|
length: number
|
||||||
|
token_type: string
|
||||||
|
token_modifiers_bitset: string
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
delta_line = 0,
|
||||||
|
delta_start = 0,
|
||||||
|
length = 0,
|
||||||
|
token_type = '',
|
||||||
|
token_modifiers_bitset = ''
|
||||||
|
) {
|
||||||
|
this.delta_line = delta_line
|
||||||
|
this.delta_start = delta_start
|
||||||
|
this.length = length
|
||||||
|
this.token_type = token_type
|
||||||
|
this.token_modifiers_bitset = token_modifiers_bitset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function deserializeTokens(
|
||||||
|
data: number[],
|
||||||
|
semanticTokensProvider?: LSP.SemanticTokensOptions
|
||||||
|
): SemanticToken[] {
|
||||||
|
if (!semanticTokensProvider) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
// Check if data length is divisible by 5
|
||||||
|
if (data.length % 5 !== 0) {
|
||||||
|
throw new Error('Length is not divisible by 5')
|
||||||
|
}
|
||||||
|
|
||||||
|
const tokens = []
|
||||||
|
for (let i = 0; i < data.length; i += 5) {
|
||||||
|
tokens.push(
|
||||||
|
new SemanticToken(
|
||||||
|
data[i],
|
||||||
|
data[i + 1],
|
||||||
|
data[i + 2],
|
||||||
|
semanticTokensProvider.legend.tokenTypes[data[i + 3]],
|
||||||
|
semanticTokensProvider.legend.tokenModifiers[data[i + 4]]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tokens
|
||||||
|
}
|
80
src/editor/lsp/server-capability-registration.ts
Normal file
80
src/editor/lsp/server-capability-registration.ts
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
import {
|
||||||
|
Registration,
|
||||||
|
ServerCapabilities,
|
||||||
|
Unregistration,
|
||||||
|
} from 'vscode-languageserver-protocol'
|
||||||
|
|
||||||
|
interface IFlexibleServerCapabilities extends ServerCapabilities {
|
||||||
|
[key: string]: any
|
||||||
|
}
|
||||||
|
|
||||||
|
interface IMethodServerCapabilityProviderDictionary {
|
||||||
|
[key: string]: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const ServerCapabilitiesProviders: IMethodServerCapabilityProviderDictionary = {
|
||||||
|
'textDocument/hover': 'hoverProvider',
|
||||||
|
'textDocument/completion': 'completionProvider',
|
||||||
|
'textDocument/signatureHelp': 'signatureHelpProvider',
|
||||||
|
'textDocument/definition': 'definitionProvider',
|
||||||
|
'textDocument/typeDefinition': 'typeDefinitionProvider',
|
||||||
|
'textDocument/implementation': 'implementationProvider',
|
||||||
|
'textDocument/references': 'referencesProvider',
|
||||||
|
'textDocument/documentHighlight': 'documentHighlightProvider',
|
||||||
|
'textDocument/documentSymbol': 'documentSymbolProvider',
|
||||||
|
'textDocument/workspaceSymbol': 'workspaceSymbolProvider',
|
||||||
|
'textDocument/codeAction': 'codeActionProvider',
|
||||||
|
'textDocument/codeLens': 'codeLensProvider',
|
||||||
|
'textDocument/documentFormatting': 'documentFormattingProvider',
|
||||||
|
'textDocument/documentRangeFormatting': 'documentRangeFormattingProvider',
|
||||||
|
'textDocument/documentOnTypeFormatting': 'documentOnTypeFormattingProvider',
|
||||||
|
'textDocument/rename': 'renameProvider',
|
||||||
|
'textDocument/documentLink': 'documentLinkProvider',
|
||||||
|
'textDocument/color': 'colorProvider',
|
||||||
|
'textDocument/foldingRange': 'foldingRangeProvider',
|
||||||
|
'textDocument/declaration': 'declarationProvider',
|
||||||
|
'textDocument/executeCommand': 'executeCommandProvider',
|
||||||
|
}
|
||||||
|
|
||||||
|
function registerServerCapability(
|
||||||
|
serverCapabilities: ServerCapabilities,
|
||||||
|
registration: Registration
|
||||||
|
): ServerCapabilities {
|
||||||
|
const serverCapabilitiesCopy = JSON.parse(
|
||||||
|
JSON.stringify(serverCapabilities)
|
||||||
|
) as IFlexibleServerCapabilities
|
||||||
|
const { method, registerOptions } = registration
|
||||||
|
const providerName = ServerCapabilitiesProviders[method]
|
||||||
|
|
||||||
|
if (providerName) {
|
||||||
|
if (!registerOptions) {
|
||||||
|
serverCapabilitiesCopy[providerName] = true
|
||||||
|
} else {
|
||||||
|
serverCapabilitiesCopy[providerName] = Object.assign(
|
||||||
|
{},
|
||||||
|
JSON.parse(JSON.stringify(registerOptions))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
throw new Error('Could not register server capability.')
|
||||||
|
}
|
||||||
|
|
||||||
|
return serverCapabilitiesCopy
|
||||||
|
}
|
||||||
|
|
||||||
|
function unregisterServerCapability(
|
||||||
|
serverCapabilities: ServerCapabilities,
|
||||||
|
unregistration: Unregistration
|
||||||
|
): ServerCapabilities {
|
||||||
|
const serverCapabilitiesCopy = JSON.parse(
|
||||||
|
JSON.stringify(serverCapabilities)
|
||||||
|
) as IFlexibleServerCapabilities
|
||||||
|
const { method } = unregistration
|
||||||
|
const providerName = ServerCapabilitiesProviders[method]
|
||||||
|
|
||||||
|
delete serverCapabilitiesCopy[providerName]
|
||||||
|
|
||||||
|
return serverCapabilitiesCopy
|
||||||
|
}
|
||||||
|
|
||||||
|
export { registerServerCapability, unregisterServerCapability }
|
42
src/editor/lsp/server.ts
Normal file
42
src/editor/lsp/server.ts
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
import init, {
|
||||||
|
InitOutput,
|
||||||
|
lsp_run,
|
||||||
|
ServerConfig,
|
||||||
|
} from '../../wasm-lib/pkg/wasm_lib'
|
||||||
|
import { FromServer, IntoServer } from './codec'
|
||||||
|
|
||||||
|
let server: null | Server
|
||||||
|
|
||||||
|
export default class Server {
|
||||||
|
readonly initOutput: InitOutput
|
||||||
|
readonly #intoServer: IntoServer
|
||||||
|
readonly #fromServer: FromServer
|
||||||
|
|
||||||
|
private constructor(
|
||||||
|
initOutput: InitOutput,
|
||||||
|
intoServer: IntoServer,
|
||||||
|
fromServer: FromServer
|
||||||
|
) {
|
||||||
|
this.initOutput = initOutput
|
||||||
|
this.#intoServer = intoServer
|
||||||
|
this.#fromServer = fromServer
|
||||||
|
}
|
||||||
|
|
||||||
|
static async initialize(
|
||||||
|
intoServer: IntoServer,
|
||||||
|
fromServer: FromServer
|
||||||
|
): Promise<Server> {
|
||||||
|
if (null == server) {
|
||||||
|
const initOutput = await init()
|
||||||
|
server = new Server(initOutput, intoServer, fromServer)
|
||||||
|
} else {
|
||||||
|
console.warn('Server already initialized; ignoring')
|
||||||
|
}
|
||||||
|
return server
|
||||||
|
}
|
||||||
|
|
||||||
|
async start(): Promise<void> {
|
||||||
|
const config = new ServerConfig(this.#intoServer, this.#fromServer)
|
||||||
|
await lsp_run(config)
|
||||||
|
}
|
||||||
|
}
|
21
src/editor/lsp/tracer.ts
Normal file
21
src/editor/lsp/tracer.ts
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
import { Message } from 'vscode-languageserver-protocol'
|
||||||
|
|
||||||
|
const env = import.meta.env.MODE
|
||||||
|
|
||||||
|
export default class Tracer {
|
||||||
|
static client(message: string): void {
|
||||||
|
// These are really noisy, so we have a special env var for them.
|
||||||
|
if (env === 'lsp_tracing') {
|
||||||
|
console.log('lsp client message', message)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static server(input: string | Message): void {
|
||||||
|
// These are really noisy, so we have a special env var for them.
|
||||||
|
if (env === 'lsp_tracing') {
|
||||||
|
const message: string =
|
||||||
|
typeof input === 'string' ? input : JSON.stringify(input)
|
||||||
|
console.log('lsp server message', message)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -82,8 +82,22 @@ code {
|
|||||||
monospace;
|
monospace;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.full-height-subtract {
|
||||||
|
--height-subtract: 2.25rem;
|
||||||
|
height: 100%;
|
||||||
|
max-height: calc(100% - var(--height-subtract));
|
||||||
|
}
|
||||||
|
|
||||||
#code-mirror-override .cm-editor {
|
#code-mirror-override .cm-editor {
|
||||||
@apply bg-transparent;
|
@apply h-full bg-transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-scroller {
|
||||||
|
@apply h-full;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-scroller::-webkit-scrollbar {
|
||||||
|
@apply h-0;
|
||||||
}
|
}
|
||||||
|
|
||||||
#code-mirror-override .cm-activeLine,
|
#code-mirror-override .cm-activeLine,
|
||||||
@ -132,3 +146,45 @@ code {
|
|||||||
.react-json-view {
|
.react-json-view {
|
||||||
@apply bg-transparent !important;
|
@apply bg-transparent !important;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-tooltip {
|
||||||
|
@apply text-xs shadow-md;
|
||||||
|
@apply bg-chalkboard-10 text-chalkboard-80;
|
||||||
|
@apply rounded-sm border-solid border border-chalkboard-40/30 border-l-liquid-10;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark #code-mirror-override .cm-tooltip {
|
||||||
|
@apply bg-chalkboard-110 text-chalkboard-40;
|
||||||
|
@apply border-chalkboard-70/20 border-l-liquid-70;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-tooltip-hover {
|
||||||
|
@apply py-1 px-2 w-max max-w-md;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-completionInfo {
|
||||||
|
@apply px-4 rounded-l-none;
|
||||||
|
@apply bg-chalkboard-10 text-liquid-90;
|
||||||
|
@apply border-liquid-40/30;
|
||||||
|
}
|
||||||
|
|
||||||
|
.dark #code-mirror-override .cm-completionInfo {
|
||||||
|
@apply bg-liquid-120 text-liquid-50;
|
||||||
|
@apply border-liquid-90/60;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-tooltip-autocomplete li {
|
||||||
|
@apply px-2 py-1;
|
||||||
|
}
|
||||||
|
#code-mirror-override .cm-tooltip-autocomplete li[aria-selected='true'] {
|
||||||
|
@apply bg-liquid-10 text-liquid-110;
|
||||||
|
}
|
||||||
|
.dark #code-mirror-override .cm-tooltip-autocomplete li[aria-selected='true'] {
|
||||||
|
@apply bg-liquid-100 text-liquid-20;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-mirror-override .cm-content {
|
||||||
|
white-space: pre-wrap;
|
||||||
|
word-break: normal;
|
||||||
|
word-wrap: break-word;
|
||||||
|
}
|
||||||
|
@ -179,6 +179,9 @@ const newVar = myVar + 1
|
|||||||
name: 'aIdentifier',
|
name: 'aIdentifier',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
function: {
|
||||||
|
type: 'InMemory',
|
||||||
|
},
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -211,7 +214,6 @@ describe('testing function declaration', () => {
|
|||||||
type: 'FunctionExpression',
|
type: 'FunctionExpression',
|
||||||
start: 11,
|
start: 11,
|
||||||
end: 19,
|
end: 19,
|
||||||
id: null,
|
|
||||||
params: [],
|
params: [],
|
||||||
body: {
|
body: {
|
||||||
start: 17,
|
start: 17,
|
||||||
@ -250,7 +252,6 @@ describe('testing function declaration', () => {
|
|||||||
type: 'FunctionExpression',
|
type: 'FunctionExpression',
|
||||||
start: 11,
|
start: 11,
|
||||||
end: 39,
|
end: 39,
|
||||||
id: null,
|
|
||||||
params: [
|
params: [
|
||||||
{
|
{
|
||||||
type: 'Identifier',
|
type: 'Identifier',
|
||||||
@ -326,7 +327,6 @@ const myVar = funcN(1, 2)`
|
|||||||
type: 'FunctionExpression',
|
type: 'FunctionExpression',
|
||||||
start: 11,
|
start: 11,
|
||||||
end: 37,
|
end: 37,
|
||||||
id: null,
|
|
||||||
params: [
|
params: [
|
||||||
{
|
{
|
||||||
type: 'Identifier',
|
type: 'Identifier',
|
||||||
@ -416,6 +416,9 @@ const myVar = funcN(1, 2)`
|
|||||||
raw: '2',
|
raw: '2',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
function: {
|
||||||
|
type: 'InMemory',
|
||||||
|
},
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -485,6 +488,7 @@ describe('testing pipe operator special', () => {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -521,6 +525,7 @@ describe('testing pipe operator special', () => {
|
|||||||
},
|
},
|
||||||
{ type: 'PipeSubstitution', start: 59, end: 60 },
|
{ type: 'PipeSubstitution', start: 59, end: 60 },
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -593,6 +598,7 @@ describe('testing pipe operator special', () => {
|
|||||||
},
|
},
|
||||||
{ type: 'PipeSubstitution', start: 105, end: 106 },
|
{ type: 'PipeSubstitution', start: 105, end: 106 },
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -629,6 +635,7 @@ describe('testing pipe operator special', () => {
|
|||||||
},
|
},
|
||||||
{ type: 'PipeSubstitution', start: 128, end: 129 },
|
{ type: 'PipeSubstitution', start: 128, end: 129 },
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -651,6 +658,9 @@ describe('testing pipe operator special', () => {
|
|||||||
},
|
},
|
||||||
{ type: 'PipeSubstitution', start: 143, end: 144 },
|
{ type: 'PipeSubstitution', start: 143, end: 144 },
|
||||||
],
|
],
|
||||||
|
function: {
|
||||||
|
type: 'InMemory',
|
||||||
|
},
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@ -730,6 +740,9 @@ describe('testing pipe operator special', () => {
|
|||||||
end: 35,
|
end: 35,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
function: {
|
||||||
|
type: 'InMemory',
|
||||||
|
},
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@ -1550,7 +1563,10 @@ const key = 'c'`
|
|||||||
type: 'NoneCodeNode',
|
type: 'NoneCodeNode',
|
||||||
start: code.indexOf('\n// this is a comment'),
|
start: code.indexOf('\n// this is a comment'),
|
||||||
end: code.indexOf('const key'),
|
end: code.indexOf('const key'),
|
||||||
value: '\n// this is a comment\n',
|
value: {
|
||||||
|
type: 'blockComment',
|
||||||
|
value: 'this is a comment',
|
||||||
|
},
|
||||||
}
|
}
|
||||||
const { nonCodeMeta } = parser_wasm(code)
|
const { nonCodeMeta } = parser_wasm(code)
|
||||||
expect(nonCodeMeta.noneCodeNodes[0]).toEqual(nonCodeMetaInstance)
|
expect(nonCodeMeta.noneCodeNodes[0]).toEqual(nonCodeMetaInstance)
|
||||||
@ -1560,7 +1576,9 @@ const key = 'c'`
|
|||||||
const { nonCodeMeta: nonCodeMeta2 } = parser_wasm(
|
const { nonCodeMeta: nonCodeMeta2 } = parser_wasm(
|
||||||
codeWithExtraStartWhitespace
|
codeWithExtraStartWhitespace
|
||||||
)
|
)
|
||||||
expect(nonCodeMeta2.noneCodeNodes[0].value).toBe(nonCodeMetaInstance.value)
|
expect(nonCodeMeta2.noneCodeNodes[0].value).toStrictEqual(
|
||||||
|
nonCodeMetaInstance.value
|
||||||
|
)
|
||||||
expect(nonCodeMeta2.noneCodeNodes[0].start).not.toBe(
|
expect(nonCodeMeta2.noneCodeNodes[0].start).not.toBe(
|
||||||
nonCodeMetaInstance.start
|
nonCodeMetaInstance.start
|
||||||
)
|
)
|
||||||
@ -1583,7 +1601,10 @@ const key = 'c'`
|
|||||||
type: 'NoneCodeNode',
|
type: 'NoneCodeNode',
|
||||||
start: 106,
|
start: 106,
|
||||||
end: 166,
|
end: 166,
|
||||||
value: ' /* this is\n a comment\n spanning a few lines */\n ',
|
value: {
|
||||||
|
type: 'blockComment',
|
||||||
|
value: 'this is\n a comment\n spanning a few lines',
|
||||||
|
},
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
it('comments in a pipe expression', () => {
|
it('comments in a pipe expression', () => {
|
||||||
@ -1603,7 +1624,10 @@ const key = 'c'`
|
|||||||
type: 'NoneCodeNode',
|
type: 'NoneCodeNode',
|
||||||
start: 125,
|
start: 125,
|
||||||
end: 141,
|
end: 141,
|
||||||
value: '\n// a comment\n ',
|
value: {
|
||||||
|
type: 'blockComment',
|
||||||
|
value: 'a comment',
|
||||||
|
},
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@ -1627,6 +1651,7 @@ describe('test UnaryExpression', () => {
|
|||||||
{ type: 'Literal', start: 19, end: 20, value: 4, raw: '4' },
|
{ type: 'Literal', start: 19, end: 20, value: 4, raw: '4' },
|
||||||
{ type: 'Literal', start: 22, end: 25, value: 100, raw: '100' },
|
{ type: 'Literal', start: 22, end: 25, value: 100, raw: '100' },
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
@ -1660,10 +1685,12 @@ describe('testing nested call expressions', () => {
|
|||||||
{ type: 'Literal', start: 34, end: 35, value: 5, raw: '5' },
|
{ type: 'Literal', start: 34, end: 35, value: 5, raw: '5' },
|
||||||
{ type: 'Literal', start: 37, end: 38, value: 3, raw: '3' },
|
{ type: 'Literal', start: 37, end: 38, value: 3, raw: '3' },
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
@ -1695,6 +1722,7 @@ describe('should recognise callExpresions in binaryExpressions', () => {
|
|||||||
},
|
},
|
||||||
{ type: 'PipeSubstitution', start: 25, end: 26 },
|
{ type: 'PipeSubstitution', start: 25, end: 26 },
|
||||||
],
|
],
|
||||||
|
function: expect.any(Object),
|
||||||
optional: false,
|
optional: false,
|
||||||
},
|
},
|
||||||
right: { type: 'Literal', value: 1, raw: '1', start: 30, end: 31 },
|
right: { type: 'Literal', value: 1, raw: '1', start: 30, end: 31 },
|
||||||
|
@ -36,14 +36,14 @@ export function addSketchTo(
|
|||||||
const _node = { ...node }
|
const _node = { ...node }
|
||||||
const _name = name || findUniqueName(node, 'part')
|
const _name = name || findUniqueName(node, 'part')
|
||||||
|
|
||||||
const startSketchAt = createCallExpression('startSketchAt', [
|
const startSketchAt = createCallExpressionStdLib('startSketchAt', [
|
||||||
createLiteral('default'),
|
createLiteral('default'),
|
||||||
])
|
])
|
||||||
const rotate = createCallExpression(axis === 'xz' ? 'rx' : 'ry', [
|
const rotate = createCallExpression(axis === 'xz' ? 'rx' : 'ry', [
|
||||||
createLiteral(90),
|
createLiteral(90),
|
||||||
createPipeSubstitution(),
|
createPipeSubstitution(),
|
||||||
])
|
])
|
||||||
const initialLineTo = createCallExpression('line', [
|
const initialLineTo = createCallExpressionStdLib('line', [
|
||||||
createLiteral('default'),
|
createLiteral('default'),
|
||||||
createPipeSubstitution(),
|
createPipeSubstitution(),
|
||||||
])
|
])
|
||||||
@ -112,7 +112,9 @@ function addToShow(node: Program, name: string): Program {
|
|||||||
const dumbyStartend = { start: 0, end: 0 }
|
const dumbyStartend = { start: 0, end: 0 }
|
||||||
const showCallIndex = getShowIndex(_node)
|
const showCallIndex = getShowIndex(_node)
|
||||||
if (showCallIndex === -1) {
|
if (showCallIndex === -1) {
|
||||||
const showCall = createCallExpression('show', [createIdentifier(name)])
|
const showCall = createCallExpressionStdLib('show', [
|
||||||
|
createIdentifier(name),
|
||||||
|
])
|
||||||
const showExpressionStatement: ExpressionStatement = {
|
const showExpressionStatement: ExpressionStatement = {
|
||||||
type: 'ExpressionStatement',
|
type: 'ExpressionStatement',
|
||||||
...dumbyStartend,
|
...dumbyStartend,
|
||||||
@ -124,7 +126,7 @@ function addToShow(node: Program, name: string): Program {
|
|||||||
const showCall = { ..._node.body[showCallIndex] } as ExpressionStatement
|
const showCall = { ..._node.body[showCallIndex] } as ExpressionStatement
|
||||||
const showCallArgs = (showCall.expression as CallExpression).arguments
|
const showCallArgs = (showCall.expression as CallExpression).arguments
|
||||||
const newShowCallArgs: Value[] = [...showCallArgs, createIdentifier(name)]
|
const newShowCallArgs: Value[] = [...showCallArgs, createIdentifier(name)]
|
||||||
const newShowExpression = createCallExpression('show', newShowCallArgs)
|
const newShowExpression = createCallExpressionStdLib('show', newShowCallArgs)
|
||||||
|
|
||||||
_node.body[showCallIndex] = {
|
_node.body[showCallIndex] = {
|
||||||
...showCall,
|
...showCall,
|
||||||
@ -225,7 +227,7 @@ export function extrudeSketch(
|
|||||||
const { node: variableDeclorator, shallowPath: pathToDecleration } =
|
const { node: variableDeclorator, shallowPath: pathToDecleration } =
|
||||||
getNodeFromPath<VariableDeclarator>(_node, pathToNode, 'VariableDeclarator')
|
getNodeFromPath<VariableDeclarator>(_node, pathToNode, 'VariableDeclarator')
|
||||||
|
|
||||||
const extrudeCall = createCallExpression('extrude', [
|
const extrudeCall = createCallExpressionStdLib('extrude', [
|
||||||
createLiteral(4),
|
createLiteral(4),
|
||||||
shouldPipe
|
shouldPipe
|
||||||
? createPipeSubstitution()
|
? createPipeSubstitution()
|
||||||
@ -313,15 +315,15 @@ export function sketchOnExtrudedFace(
|
|||||||
const newSketch = createVariableDeclaration(
|
const newSketch = createVariableDeclaration(
|
||||||
newSketchName,
|
newSketchName,
|
||||||
createPipeExpression([
|
createPipeExpression([
|
||||||
createCallExpression('startSketchAt', [
|
createCallExpressionStdLib('startSketchAt', [
|
||||||
createArrayExpression([createLiteral(0), createLiteral(0)]),
|
createArrayExpression([createLiteral(0), createLiteral(0)]),
|
||||||
]),
|
]),
|
||||||
createCallExpression('lineTo', [
|
createCallExpressionStdLib('lineTo', [
|
||||||
createArrayExpression([createLiteral(1), createLiteral(1)]),
|
createArrayExpression([createLiteral(1), createLiteral(1)]),
|
||||||
createPipeSubstitution(),
|
createPipeSubstitution(),
|
||||||
]),
|
]),
|
||||||
createCallExpression('transform', [
|
createCallExpression('transform', [
|
||||||
createCallExpression('getExtrudeWallTransform', [
|
createCallExpressionStdLib('getExtrudeWallTransform', [
|
||||||
createLiteral(tag),
|
createLiteral(tag),
|
||||||
createIdentifier(oldSketchName),
|
createIdentifier(oldSketchName),
|
||||||
]),
|
]),
|
||||||
@ -414,6 +416,40 @@ export function createPipeSubstitution(): PipeSubstitution {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function createCallExpressionStdLib(
|
||||||
|
name: string,
|
||||||
|
args: CallExpression['arguments']
|
||||||
|
): CallExpression {
|
||||||
|
return {
|
||||||
|
type: 'CallExpression',
|
||||||
|
start: 0,
|
||||||
|
end: 0,
|
||||||
|
callee: {
|
||||||
|
type: 'Identifier',
|
||||||
|
start: 0,
|
||||||
|
end: 0,
|
||||||
|
name,
|
||||||
|
},
|
||||||
|
function: {
|
||||||
|
type: 'StdLib',
|
||||||
|
func: {
|
||||||
|
// We only need the name here to map it back when it serializes
|
||||||
|
// to rust, don't worry about the rest.
|
||||||
|
name,
|
||||||
|
summary: '',
|
||||||
|
description: '',
|
||||||
|
tags: [],
|
||||||
|
returnValue: { type: '', required: false, name: '', schema: {} },
|
||||||
|
args: [],
|
||||||
|
unpublished: false,
|
||||||
|
deprecated: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
optional: false,
|
||||||
|
arguments: args,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export function createCallExpression(
|
export function createCallExpression(
|
||||||
name: string,
|
name: string,
|
||||||
args: CallExpression['arguments']
|
args: CallExpression['arguments']
|
||||||
@ -428,6 +464,9 @@ export function createCallExpression(
|
|||||||
end: 0,
|
end: 0,
|
||||||
name,
|
name,
|
||||||
},
|
},
|
||||||
|
function: {
|
||||||
|
type: 'InMemory',
|
||||||
|
},
|
||||||
optional: false,
|
optional: false,
|
||||||
arguments: args,
|
arguments: args,
|
||||||
}
|
}
|
||||||
|
@ -45,8 +45,7 @@ const newVar = myVar + 1`
|
|||||||
expect(recasted).toBe(code.trim())
|
expect(recasted).toBe(code.trim())
|
||||||
})
|
})
|
||||||
it('test with function call', () => {
|
it('test with function call', () => {
|
||||||
const code = `
|
const code = `const myVar = "hello"
|
||||||
const myVar = "hello"
|
|
||||||
log(5, myVar)`
|
log(5, myVar)`
|
||||||
const { ast } = code2ast(code)
|
const { ast } = code2ast(code)
|
||||||
const recasted = recast(ast)
|
const recasted = recast(ast)
|
||||||
@ -71,8 +70,7 @@ log(5, myVar)`
|
|||||||
|> lineTo({ to: [1, 0], tag: "rightPath" }, %)
|
|> lineTo({ to: [1, 0], tag: "rightPath" }, %)
|
||||||
|> close(%)
|
|> close(%)
|
||||||
|
|
||||||
show(mySketch)
|
show(mySketch)`
|
||||||
`
|
|
||||||
const { ast } = code2ast(code)
|
const { ast } = code2ast(code)
|
||||||
const recasted = recast(ast)
|
const recasted = recast(ast)
|
||||||
expect(recasted).toBe(code.trim())
|
expect(recasted).toBe(code.trim())
|
||||||
@ -186,8 +184,7 @@ const myVar2 = yo['a'][key2].c`
|
|||||||
|
|
||||||
describe('testing recasting with comments and whitespace', () => {
|
describe('testing recasting with comments and whitespace', () => {
|
||||||
it('code with comments', () => {
|
it('code with comments', () => {
|
||||||
const code = `
|
const code = `const yo = { a: { b: { c: '123' } } }
|
||||||
const yo = { a: { b: { c: '123' } } }
|
|
||||||
// this is a comment
|
// this is a comment
|
||||||
const key = 'c'`
|
const key = 'c'`
|
||||||
|
|
||||||
@ -197,20 +194,18 @@ const key = 'c'`
|
|||||||
expect(recasted).toBe(code)
|
expect(recasted).toBe(code)
|
||||||
})
|
})
|
||||||
it('code with comment and extra lines', () => {
|
it('code with comment and extra lines', () => {
|
||||||
const code = `
|
const code = `const yo = 'c'
|
||||||
const yo = 'c' /* this is
|
|
||||||
|
/* this is
|
||||||
a
|
a
|
||||||
comment */
|
comment */
|
||||||
|
|
||||||
const yo = 'bing'`
|
const yo = 'bing'`
|
||||||
const { ast } = code2ast(code)
|
const { ast } = code2ast(code)
|
||||||
const recasted = recast(ast)
|
const recasted = recast(ast)
|
||||||
expect(recasted).toBe(code)
|
expect(recasted).toBe(code)
|
||||||
})
|
})
|
||||||
it('comments at the start and end', () => {
|
it('comments at the start and end', () => {
|
||||||
const code = `
|
const code = `// this is a comment
|
||||||
// this is a comment
|
|
||||||
|
|
||||||
const yo = { a: { b: { c: '123' } } }
|
const yo = { a: { b: { c: '123' } } }
|
||||||
const key = 'c'
|
const key = 'c'
|
||||||
|
|
||||||
@ -220,12 +215,12 @@ const key = 'c'
|
|||||||
expect(recasted).toBe(code)
|
expect(recasted).toBe(code)
|
||||||
})
|
})
|
||||||
it('comments in a fn block', () => {
|
it('comments in a fn block', () => {
|
||||||
const code = `
|
const code = `const myFn = () => {
|
||||||
const myFn = () => {
|
|
||||||
// this is a comment
|
// this is a comment
|
||||||
const yo = { a: { b: { c: '123' } } } /* block
|
const yo = { a: { b: { c: '123' } } }
|
||||||
comment */
|
|
||||||
|
|
||||||
|
/* block
|
||||||
|
comment */
|
||||||
const key = 'c'
|
const key = 'c'
|
||||||
// this is also a comment
|
// this is also a comment
|
||||||
}`
|
}`
|
||||||
@ -269,7 +264,21 @@ const mySk1 = startSketchAt([0, 0])
|
|||||||
*/`
|
*/`
|
||||||
const { ast } = code2ast(code)
|
const { ast } = code2ast(code)
|
||||||
const recasted = recast(ast)
|
const recasted = recast(ast)
|
||||||
expect(recasted).toBe(code)
|
expect(recasted).toBe(`// comment at start
|
||||||
|
const mySk1 = startSketchAt([0, 0])
|
||||||
|
|> lineTo([1, 1], %)
|
||||||
|
// comment here
|
||||||
|
|> lineTo({ to: [0, 1], tag: 'myTag' }, %)
|
||||||
|
|> lineTo([1, 1], %)
|
||||||
|
/* and
|
||||||
|
here
|
||||||
|
|
||||||
|
a comment between pipe expression statements */
|
||||||
|
|> rx(90, %)
|
||||||
|
// and another with just white space between others below
|
||||||
|
|> ry(45, %)
|
||||||
|
|> rx(45, %)
|
||||||
|
// one more for good measure`)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -295,7 +304,7 @@ describe('testing call Expressions in BinaryExpressions and UnaryExpressions', (
|
|||||||
it('with unaryExpression in sketch situation', () => {
|
it('with unaryExpression in sketch situation', () => {
|
||||||
const code = [
|
const code = [
|
||||||
'const part001 = startSketchAt([0, 0])',
|
'const part001 = startSketchAt([0, 0])',
|
||||||
'|> line([-2.21, -legLen(5, min(3, 999))], %)',
|
' |> line([-2.21, -legLen(5, min(3, 999))], %)',
|
||||||
].join('\n')
|
].join('\n')
|
||||||
const { ast } = code2ast(code)
|
const { ast } = code2ast(code)
|
||||||
const recasted = recast(ast)
|
const recasted = recast(ast)
|
||||||
@ -309,10 +318,10 @@ describe('it recasts wrapped object expressions in pipe bodies with correct inde
|
|||||||
|> line({ to: [0.62, 4.15], tag: 'seg01' }, %)
|
|> line({ to: [0.62, 4.15], tag: 'seg01' }, %)
|
||||||
|> line([2.77, -1.24], %)
|
|> line([2.77, -1.24], %)
|
||||||
|> angledLineThatIntersects({
|
|> angledLineThatIntersects({
|
||||||
angle: 201,
|
angle: 201,
|
||||||
offset: -1.35,
|
offset: -1.35,
|
||||||
intersectTag: 'seg01'
|
intersectTag: 'seg01'
|
||||||
}, %)
|
}, %)
|
||||||
|> line([-0.42, -1.72], %)
|
|> line([-0.42, -1.72], %)
|
||||||
show(part001)`
|
show(part001)`
|
||||||
const { ast } = code2ast(code)
|
const { ast } = code2ast(code)
|
||||||
|
@ -97,11 +97,10 @@ describe('testing changeSketchArguments', () => {
|
|||||||
const lineAfterChange = 'lineTo([2, 3], %)'
|
const lineAfterChange = 'lineTo([2, 3], %)'
|
||||||
test('changeSketchArguments', async () => {
|
test('changeSketchArguments', async () => {
|
||||||
// Enable rotations #152
|
// Enable rotations #152
|
||||||
const genCode = (line: string) => `
|
const genCode = (line: string) => `const mySketch001 = startSketchAt([0, 0])
|
||||||
const mySketch001 = startSketchAt([0, 0])
|
|> ${line}
|
||||||
|> ${line}
|
|> lineTo([0.46, -5.82], %)
|
||||||
|> lineTo([0.46, -5.82], %)
|
// |> rx(45, %)
|
||||||
// |> rx(45, %)
|
|
||||||
show(mySketch001)`
|
show(mySketch001)`
|
||||||
const code = genCode(lineToChange)
|
const code = genCode(lineToChange)
|
||||||
const expectedCode = genCode(lineAfterChange)
|
const expectedCode = genCode(lineAfterChange)
|
||||||
@ -160,8 +159,7 @@ show(mySketch001)`
|
|||||||
],
|
],
|
||||||
})
|
})
|
||||||
// Enable rotations #152
|
// Enable rotations #152
|
||||||
const expectedCode = `
|
const expectedCode = `const mySketch001 = startSketchAt([0, 0])
|
||||||
const mySketch001 = startSketchAt([0, 0])
|
|
||||||
// |> rx(45, %)
|
// |> rx(45, %)
|
||||||
|> lineTo([-1.59, -1.54], %)
|
|> lineTo([-1.59, -1.54], %)
|
||||||
|> lineTo([0.46, -5.82], %)
|
|> lineTo([0.46, -5.82], %)
|
||||||
@ -175,12 +173,11 @@ describe('testing addTagForSketchOnFace', () => {
|
|||||||
it('needs to be in it', async () => {
|
it('needs to be in it', async () => {
|
||||||
const originalLine = 'lineTo([-1.59, -1.54], %)'
|
const originalLine = 'lineTo([-1.59, -1.54], %)'
|
||||||
// Enable rotations #152
|
// Enable rotations #152
|
||||||
const genCode = (line: string) => `
|
const genCode = (line: string) => `const mySketch001 = startSketchAt([0, 0])
|
||||||
const mySketch001 = startSketchAt([0, 0])
|
// |> rx(45, %)
|
||||||
// |> rx(45, %)
|
|> ${line}
|
||||||
|> ${line}
|
|> lineTo([0.46, -5.82], %)
|
||||||
|> lineTo([0.46, -5.82], %)
|
show(mySketch001)`
|
||||||
show(mySketch001)`
|
|
||||||
const code = genCode(originalLine)
|
const code = genCode(originalLine)
|
||||||
const ast = parser_wasm(code)
|
const ast = parser_wasm(code)
|
||||||
const programMemory = await enginelessExecutor(ast)
|
const programMemory = await enginelessExecutor(ast)
|
||||||
|
@ -59,20 +59,20 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
|
|||||||
` |> lineTo({ to: [1, 1], tag: 'abc1' }, %)`,
|
` |> lineTo({ to: [1, 1], tag: 'abc1' }, %)`,
|
||||||
` |> line({ to: [-2.04, -0.7], tag: 'abc2' }, %)`,
|
` |> line({ to: [-2.04, -0.7], tag: 'abc2' }, %)`,
|
||||||
` |> angledLine({`,
|
` |> angledLine({`,
|
||||||
` angle: 157,`,
|
` angle: 157,`,
|
||||||
` length: 1.69,`,
|
` length: 1.69,`,
|
||||||
` tag: 'abc3'`,
|
` tag: 'abc3'`,
|
||||||
` }, %)`,
|
` }, %)`,
|
||||||
` |> angledLineOfXLength({`,
|
` |> angledLineOfXLength({`,
|
||||||
` angle: 217,`,
|
` angle: 217,`,
|
||||||
` length: 0.86,`,
|
` length: 0.86,`,
|
||||||
` tag: 'abc4'`,
|
` tag: 'abc4'`,
|
||||||
` }, %)`,
|
` }, %)`,
|
||||||
` |> angledLineOfYLength({`,
|
` |> angledLineOfYLength({`,
|
||||||
` angle: 104,`,
|
` angle: 104,`,
|
||||||
` length: 1.58,`,
|
` length: 1.58,`,
|
||||||
` tag: 'abc5'`,
|
` tag: 'abc5'`,
|
||||||
` }, %)`,
|
` }, %)`,
|
||||||
` |> angledLineToX({ angle: 55, to: -2.89, tag: 'abc6' }, %)`,
|
` |> angledLineToX({ angle: 55, to: -2.89, tag: 'abc6' }, %)`,
|
||||||
` |> angledLineToY({ angle: 330, to: 2.53, tag: 'abc7' }, %)`,
|
` |> angledLineToY({ angle: 330, to: 2.53, tag: 'abc7' }, %)`,
|
||||||
` |> xLine({ length: 1.47, tag: 'abc8' }, %)`,
|
` |> xLine({ length: 1.47, tag: 'abc8' }, %)`,
|
||||||
@ -144,10 +144,10 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
|
|||||||
inputCode: bigExample,
|
inputCode: bigExample,
|
||||||
callToSwap: [
|
callToSwap: [
|
||||||
`angledLine({`,
|
`angledLine({`,
|
||||||
` angle: 157,`,
|
` angle: 157,`,
|
||||||
` length: 1.69,`,
|
` length: 1.69,`,
|
||||||
` tag: 'abc3'`,
|
` tag: 'abc3'`,
|
||||||
` }, %)`,
|
` }, %)`,
|
||||||
].join('\n'),
|
].join('\n'),
|
||||||
constraintType: 'horizontal',
|
constraintType: 'horizontal',
|
||||||
})
|
})
|
||||||
@ -172,10 +172,10 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
|
|||||||
inputCode: bigExample,
|
inputCode: bigExample,
|
||||||
callToSwap: [
|
callToSwap: [
|
||||||
`angledLineOfXLength({`,
|
`angledLineOfXLength({`,
|
||||||
` angle: 217,`,
|
` angle: 217,`,
|
||||||
` length: 0.86,`,
|
` length: 0.86,`,
|
||||||
` tag: 'abc4'`,
|
` tag: 'abc4'`,
|
||||||
` }, %)`,
|
` }, %)`,
|
||||||
].join('\n'),
|
].join('\n'),
|
||||||
constraintType: 'horizontal',
|
constraintType: 'horizontal',
|
||||||
})
|
})
|
||||||
@ -201,10 +201,10 @@ describe('testing swaping out sketch calls with xLine/xLineTo', () => {
|
|||||||
inputCode: bigExample,
|
inputCode: bigExample,
|
||||||
callToSwap: [
|
callToSwap: [
|
||||||
`angledLineOfYLength({`,
|
`angledLineOfYLength({`,
|
||||||
` angle: 104,`,
|
` angle: 104,`,
|
||||||
` length: 1.58,`,
|
` length: 1.58,`,
|
||||||
` tag: 'abc5'`,
|
` tag: 'abc5'`,
|
||||||
` }, %)`,
|
` }, %)`,
|
||||||
].join('\n'),
|
].join('\n'),
|
||||||
constraintType: 'vertical',
|
constraintType: 'vertical',
|
||||||
})
|
})
|
||||||
@ -133,64 +133,64 @@ const myAng2 = 134
const part001 = startSketchAt([0, 0])
|> line({ to: [1, 3.82], tag: 'seg01' }, %) // ln-should-get-tag
|> angledLineToX([
-angleToMatchLengthX('seg01', myVar, %),
myVar
], %) // ln-lineTo-xAbsolute should use angleToMatchLengthX helper
|> angledLineToY([
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
|> angledLine([45, segLen('seg01', %)], %) // ln-lineTo-free should become angledLine
|> angledLine([45, segLen('seg01', %)], %) // ln-angledLineToX-free should become angledLine
|> angledLine([myAng, segLen('seg01', %)], %) // ln-angledLineToX-angle should become angledLine
|> angledLineToX([
angleToMatchLengthX('seg01', myVar2, %),
myVar2
], %) // ln-angledLineToX-xAbsolute should use angleToMatchLengthX to get angle
|> angledLine([-45, segLen('seg01', %)], %) // ln-angledLineToY-free should become angledLine
|> angledLine([myAng2, segLen('seg01', %)], %) // ln-angledLineToY-angle should become angledLine
|> angledLineToY([
angleToMatchLengthY('seg01', myVar3, %),
myVar3
], %) // ln-angledLineToY-yAbsolute should use angleToMatchLengthY to get angle
|> line([
min(segLen('seg01', %), myVar),
legLen(segLen('seg01', %), myVar)
], %) // ln-should use legLen for y
|> line([
min(segLen('seg01', %), myVar),
-legLen(segLen('seg01', %), myVar)
], %) // ln-legLen but negative
|> angledLine([-112, segLen('seg01', %)], %) // ln-should become angledLine
|> angledLine([myVar, segLen('seg01', %)], %) // ln-use segLen for secound arg
|> angledLine([45, segLen('seg01', %)], %) // ln-segLen again
|> angledLine([54, segLen('seg01', %)], %) // ln-should be transformed to angledLine
|> angledLineOfXLength([
legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-should use legAngX to calculate angle
|> angledLineOfXLength([
180 + legAngX(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-same as above but should have + 180 to match original quadrant
|> line([
legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-legLen again but yRelative
|> line([
-legLen(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-negative legLen yRelative
|> angledLine([58, segLen('seg01', %)], %) // ln-angledLineOfYLength-free should become angledLine
|> angledLine([myAng, segLen('seg01', %)], %) // ln-angledLineOfYLength-angle should become angledLine
|> angledLineOfXLength([
legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-angledLineOfYLength-yRelative use legAngY
|> angledLineOfXLength([
270 + legAngY(segLen('seg01', %), myVar),
min(segLen('seg01', %), myVar)
], %) // ln-angledLineOfYLength-yRelative with angle > 90 use binExp
|> xLine(segLen('seg01', %), %) // ln-xLine-free should sub in segLen
|> yLine(segLen('seg01', %), %) // ln-yLine-free should sub in segLen
|> xLine(segLen('seg01', %), %) // ln-xLineTo-free should convert to xLine
@ -406,9 +406,9 @@ show(part001)`
'setVertDistance'
)
expect(expectedCode).toContain(`|> lineTo([
lastSegX(%) + myVar,
segEndY('seg01', %) + 2.93
], %) // xRelative`)
})
it('testing for yRelative to horizontal distance', async () => {
const expectedCode = await helperThing(
@ -417,9 +417,9 @@ show(part001)`
'setHorzDistance'
)
expect(expectedCode).toContain(`|> lineTo([
segEndX('seg01', %) + 2.6,
lastSegY(%) + myVar
], %) // yRelative`)
})
})
})
|
@ -110,7 +110,7 @@ const yi=45`
"brace ')' from 17 to 18",
])
expect(stringSummaryLexer('fn funcName = (param1, param2) => {}')).toEqual([
- "word 'fn' from 0 to 2",
+ "keyword 'fn' from 0 to 2",
"whitespace ' ' from 2 to 3",
"word 'funcName' from 3 to 11",
"whitespace ' ' from 11 to 12",
@ -203,7 +203,7 @@ const yi=45`
it('testing array declaration', () => {
const result = stringSummaryLexer(`const yo = [1, 2]`)
expect(result).toEqual([
- "word 'const' from 0 to 5",
+ "keyword 'const' from 0 to 5",
"whitespace ' ' from 5 to 6",
"word 'yo' from 6 to 8",
"whitespace ' ' from 8 to 9",
@ -220,7 +220,7 @@ const yi=45`
it('testing object declaration', () => {
const result = stringSummaryLexer(`const yo = {key: 'value'}`)
expect(result).toEqual([
- "word 'const' from 0 to 5",
+ "keyword 'const' from 0 to 5",
"whitespace ' ' from 5 to 6",
"word 'yo' from 6 to 8",
"whitespace ' ' from 8 to 9",
@ -241,7 +241,7 @@ const prop2 = yo['key']
const key = 'key'
const prop3 = yo[key]`)
expect(result).toEqual([
- "word 'const' from 0 to 5",
+ "keyword 'const' from 0 to 5",
"whitespace ' ' from 5 to 6",
"word 'yo' from 6 to 8",
"whitespace ' ' from 8 to 9",
@ -254,7 +254,7 @@ const prop3 = yo[key]`)
"string ''value'' from 17 to 24",
"brace '}' from 24 to 25",
"whitespace '\n' from 25 to 26",
- "word 'const' from 26 to 31",
+ "keyword 'const' from 26 to 31",
"whitespace ' ' from 31 to 32",
"word 'prop' from 32 to 36",
"whitespace ' ' from 36 to 37",
@ -264,7 +264,7 @@ const prop3 = yo[key]`)
"period '.' from 41 to 42",
"word 'key' from 42 to 45",
"whitespace '\n' from 45 to 46",
- "word 'const' from 46 to 51",
+ "keyword 'const' from 46 to 51",
"whitespace ' ' from 51 to 52",
"word 'prop2' from 52 to 57",
"whitespace ' ' from 57 to 58",
@ -275,7 +275,7 @@ const prop3 = yo[key]`)
"string ''key'' from 63 to 68",
"brace ']' from 68 to 69",
"whitespace '\n' from 69 to 70",
- "word 'const' from 70 to 75",
+ "keyword 'const' from 70 to 75",
"whitespace ' ' from 75 to 76",
"word 'key' from 76 to 79",
"whitespace ' ' from 79 to 80",
@ -283,7 +283,7 @@ const prop3 = yo[key]`)
"whitespace ' ' from 81 to 82",
"string ''key'' from 82 to 87",
"whitespace '\n' from 87 to 88",
- "word 'const' from 88 to 93",
+ "keyword 'const' from 88 to 93",
"whitespace ' ' from 93 to 94",
"word 'prop3' from 94 to 99",
"whitespace ' ' from 99 to 100",
@ -299,7 +299,7 @@ const prop3 = yo[key]`)
const result = stringSummaryLexer(`const yo = 45 // this is a comment
const yo = 6`)
expect(result).toEqual([
- "word 'const' from 0 to 5",
+ "keyword 'const' from 0 to 5",
"whitespace ' ' from 5 to 6",
"word 'yo' from 6 to 8",
"whitespace ' ' from 8 to 9",
@ -307,9 +307,9 @@ const yo = 6`)
"whitespace ' ' from 10 to 11",
"number '45' from 11 to 13",
"whitespace ' ' from 13 to 14",
- "linecomment '// this is a comment' from 14 to 34",
+ "lineComment '// this is a comment' from 14 to 34",
"whitespace '\n' from 34 to 35",
- "word 'const' from 35 to 40",
+ "keyword 'const' from 35 to 40",
"whitespace ' ' from 40 to 41",
"word 'yo' from 41 to 43",
"whitespace ' ' from 43 to 44",
@ -328,9 +328,9 @@ const yo=45`)
"string ''hi'' from 4 to 8",
"brace ')' from 8 to 9",
"whitespace '\n' from 9 to 10",
- "linecomment '// comment on a line by itself' from 10 to 40",
+ "lineComment '// comment on a line by itself' from 10 to 40",
"whitespace '\n' from 40 to 41",
- "word 'const' from 41 to 46",
+ "keyword 'const' from 41 to 46",
"whitespace ' ' from 46 to 47",
"word 'yo' from 47 to 49",
"operator '=' from 49 to 50",
@ -342,7 +342,7 @@ const yo=45`)
const ya = 6 */
const yi=45`)
expect(result).toEqual([
- "word 'const' from 0 to 5",
+ "keyword 'const' from 0 to 5",
"whitespace ' ' from 5 to 6",
"word 'yo' from 6 to 8",
"whitespace ' ' from 8 to 9",
@ -350,10 +350,10 @@ const yi=45`)
"whitespace ' ' from 10 to 11",
"number '45' from 11 to 13",
"whitespace ' ' from 13 to 14",
- `blockcomment '/* this is a comment
+ `blockComment '/* this is a comment
const ya = 6 */' from 14 to 50`,
"whitespace '\n' from 50 to 51",
- "word 'const' from 51 to 56",
+ "keyword 'const' from 51 to 56",
"whitespace ' ' from 56 to 57",
"word 'yi' from 57 to 59",
"operator '=' from 59 to 60",
|
@ -39,7 +39,6 @@ class MockEngineCommandManager
if (commandStr === undefined) {
throw new Error('commandStr is undefined')
}
- console.log('sendModelingCommandFromWasm', id, rangeStr, commandStr)
const command: EngineCommand = JSON.parse(commandStr)
const range: SourceRange = JSON.parse(rangeStr)

|
@ -62,6 +62,17 @@ export const settingsCommandBarMeta: CommandBarMeta = {
},
],
},
+ 'Set Text Wrapping': {
+ displayValue: (args: string[]) => 'Set whether text in the editor wraps',
+ args: [
+ {
+ name: 'textWrapping',
+ type: 'select',
+ defaultValue: 'textWrapping',
+ options: [{ name: 'On' }, { name: 'Off' }],
+ },
+ ],
+ },
'Set Onboarding Status': {
hide: 'both',
},
@ -78,6 +89,7 @@ export const settingsMachine = createMachine(
unitSystem: UnitSystem.Imperial,
baseUnit: 'in' as BaseUnit,
defaultDirectory: '',
+ textWrapping: 'On' as 'On' | 'Off',
showDebugPanel: false,
onboardingStatus: '',
},
@ -142,6 +154,17 @@ export const settingsMachine = createMachine(
target: 'idle',
internal: true,
},
+ 'Set Text Wrapping': {
+ actions: [
+ assign({
+ textWrapping: (_, event) => event.data.textWrapping,
+ }),
+ 'persistSettings',
+ 'toastSuccess',
+ ],
+ target: 'idle',
+ internal: true,
+ },
'Toggle Debug Panel': {
actions: [
assign({
@ -182,6 +205,7 @@ export const settingsMachine = createMachine(
data: { unitSystem: UnitSystem }
}
| { type: 'Set Base Unit'; data: { baseUnit: BaseUnit } }
+ | { type: 'Set Text Wrapping'; data: { textWrapping: 'On' | 'Off' } }
| { type: 'Set Onboarding Status'; data: { onboardingStatus: string } }
| { type: 'Toggle Debug Panel' },
},
|
@ -18,6 +18,7 @@ export interface Typegen0 {
| 'Set Default Directory'
| 'Set Default Project Name'
| 'Set Onboarding Status'
+ | 'Set Text Wrapping'
| 'Set Theme'
| 'Set Unit System'
| 'Toggle Debug Panel'
@ -26,6 +27,7 @@ export interface Typegen0 {
| 'Set Default Directory'
| 'Set Default Project Name'
| 'Set Onboarding Status'
+ | 'Set Text Wrapping'
| 'Set Theme'
| 'Set Unit System'
| 'Toggle Debug Panel'
@ -34,6 +36,7 @@ export interface Typegen0 {
| 'Set Base Unit'
| 'Set Default Directory'
| 'Set Default Project Name'
+ | 'Set Text Wrapping'
| 'Set Theme'
| 'Set Unit System'
| 'Toggle Debug Panel'
|
@ -103,7 +103,13 @@ export type BaseUnit = 'in' | 'ft' | 'mm' | 'cm' | 'm'

export const baseUnitsUnion = Object.values(baseUnits).flatMap((v) => v)

- export type PaneType = 'code' | 'variables' | 'debug' | 'kclErrors' | 'logs'
+ export type PaneType =
+ | 'code'
+ | 'variables'
+ | 'debug'
+ | 'kclErrors'
+ | 'logs'
+ | 'lspMessages'

export interface StoreState {
editorView: EditorView | null
@ -158,12 +164,12 @@ export interface StoreState {
setMediaStream: (mediaStream: MediaStream) => void
isStreamReady: boolean
setIsStreamReady: (isStreamReady: boolean) => void
+ isLSPServerReady: boolean
+ setIsLSPServerReady: (isLSPServerReady: boolean) => void
isMouseDownInStream: boolean
setIsMouseDownInStream: (isMouseDownInStream: boolean) => void
didDragInStream: boolean
setDidDragInStream: (didDragInStream: boolean) => void
- cmdId?: string
- setCmdId: (cmdId: string) => void
fileId: string
setFileId: (fileId: string) => void
streamDimensions: { streamWidth: number; streamHeight: number }
@ -341,6 +347,8 @@ export const useStore = create<StoreState>()(
setMediaStream: (mediaStream) => set({ mediaStream }),
isStreamReady: false,
setIsStreamReady: (isStreamReady) => set({ isStreamReady }),
+ isLSPServerReady: false,
+ setIsLSPServerReady: (isLSPServerReady) => set({ isLSPServerReady }),
isMouseDownInStream: false,
setIsMouseDownInStream: (isMouseDownInStream) => {
set({ isMouseDownInStream })
@ -350,8 +358,6 @@ export const useStore = create<StoreState>()(
set({ didDragInStream })
},
// For stream event handling
- cmdId: undefined,
- setCmdId: (cmdId) => set({ cmdId }),
fileId: '',
setFileId: (fileId) => set({ fileId }),
streamDimensions: { streamWidth: 1280, streamHeight: 720 },
|
315
src/wasm-lib/Cargo.lock
generated
@ -63,6 +63,54 @@ dependencies = [
|
|||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anstream"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c"
|
||||||
|
dependencies = [
|
||||||
|
"anstyle",
|
||||||
|
"anstyle-parse",
|
||||||
|
"anstyle-query",
|
||||||
|
"anstyle-wincon",
|
||||||
|
"colorchoice",
|
||||||
|
"utf8parse",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anstyle"
|
||||||
|
version = "1.0.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anstyle-parse"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
|
||||||
|
dependencies = [
|
||||||
|
"utf8parse",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anstyle-query"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
|
||||||
|
dependencies = [
|
||||||
|
"windows-sys 0.48.0",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anstyle-wincon"
|
||||||
|
version = "2.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd"
|
||||||
|
dependencies = [
|
||||||
|
"anstyle",
|
||||||
|
"windows-sys 0.48.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyhow"
|
name = "anyhow"
|
||||||
version = "1.0.75"
|
version = "1.0.75"
|
||||||
@ -78,6 +126,22 @@ version = "1.6.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
|
checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "async-codec-lite"
|
||||||
|
version = "0.0.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2527c30e3972d8ff366b353125dae828c4252a154dbe6063684f6c5e014760a3"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"bytes",
|
||||||
|
"futures-core",
|
||||||
|
"futures-io",
|
||||||
|
"futures-sink",
|
||||||
|
"log",
|
||||||
|
"pin-project-lite",
|
||||||
|
"thiserror",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "async-trait"
|
name = "async-trait"
|
||||||
version = "0.1.73"
|
version = "0.1.73"
|
||||||
@ -100,6 +164,18 @@ dependencies = [
|
|||||||
"winapi",
|
"winapi",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "auto_impl"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "fee3da8ef1276b0bee5dd1c7258010d8fffd31801447323115a25560e1327b89"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro-error",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 1.0.109",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "autocfg"
|
name = "autocfg"
|
||||||
version = "1.1.0"
|
version = "1.1.0"
|
||||||
@ -268,8 +344,8 @@ checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"atty",
|
"atty",
|
||||||
"bitflags 1.3.2",
|
"bitflags 1.3.2",
|
||||||
"clap_derive",
|
"clap_derive 3.2.25",
|
||||||
"clap_lex",
|
"clap_lex 0.2.4",
|
||||||
"indexmap 1.9.3",
|
"indexmap 1.9.3",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"strsim",
|
"strsim",
|
||||||
@ -278,6 +354,30 @@ dependencies = [
|
|||||||
"unicase",
|
"unicase",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clap"
|
||||||
|
version = "4.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "6a13b88d2c62ff462f88e4a121f17a82c1af05693a2f192b5c38d14de73c19f6"
|
||||||
|
dependencies = [
|
||||||
|
"clap_builder",
|
||||||
|
"clap_derive 4.4.2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clap_builder"
|
||||||
|
version = "4.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2bb9faaa7c2ef94b2743a21f5a29e6f0010dff4caa69ac8e9d6cf8b6fa74da08"
|
||||||
|
dependencies = [
|
||||||
|
"anstream",
|
||||||
|
"anstyle",
|
||||||
|
"clap_lex 0.5.1",
|
||||||
|
"strsim",
|
||||||
|
"unicase",
|
||||||
|
"unicode-width",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_derive"
|
name = "clap_derive"
|
||||||
version = "3.2.25"
|
version = "3.2.25"
|
||||||
@ -291,6 +391,18 @@ dependencies = [
|
|||||||
"syn 1.0.109",
|
"syn 1.0.109",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clap_derive"
|
||||||
|
version = "4.4.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873"
|
||||||
|
dependencies = [
|
||||||
|
"heck",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clap_lex"
|
name = "clap_lex"
|
||||||
version = "0.2.4"
|
version = "0.2.4"
|
||||||
@ -300,6 +412,18 @@ dependencies = [
|
|||||||
"os_str_bytes",
|
"os_str_bytes",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clap_lex"
|
||||||
|
version = "0.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "colorchoice"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "colored"
|
name = "colored"
|
||||||
version = "2.0.4"
|
version = "2.0.4"
|
||||||
@ -387,6 +511,19 @@ dependencies = [
|
|||||||
"typenum",
|
"typenum",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "dashmap"
|
||||||
|
version = "5.5.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"hashbrown 0.14.0",
|
||||||
|
"lock_api",
|
||||||
|
"once_cell",
|
||||||
|
"parking_lot_core",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "data-encoding"
|
name = "data-encoding"
|
||||||
version = "2.4.0"
|
version = "2.4.0"
|
||||||
@ -401,7 +538,7 @@ checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "derive-docs"
|
name = "derive-docs"
|
||||||
version = "0.1.0"
|
version = "0.1.3"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"convert_case",
|
"convert_case",
|
||||||
"expectorate",
|
"expectorate",
|
||||||
@ -416,9 +553,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "derive-docs"
|
name = "derive-docs"
|
||||||
version = "0.1.0"
|
version = "0.1.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "075291fd1d6d70a886078f7b1c132a160559ceb9a0fe143177872d40ea587906"
|
checksum = "5fe5c5ea065cfabc5a7c5e8ed616e369fbf108c4be01e0e5609bc9846a732664"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"convert_case",
|
"convert_case",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
@ -920,6 +1057,15 @@ dependencies = [
|
|||||||
"either",
|
"either",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itertools"
|
||||||
|
version = "0.11.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
|
||||||
|
dependencies = [
|
||||||
|
"either",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "itoa"
|
||||||
version = "1.0.9"
|
version = "1.0.9"
|
||||||
@ -948,13 +1094,16 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "kcl-lib"
|
name = "kcl-lib"
|
||||||
version = "0.1.10"
|
version = "0.1.24"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"bson",
|
"bson",
|
||||||
"derive-docs 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clap 4.4.2",
|
||||||
|
"dashmap",
|
||||||
|
"derive-docs 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"expectorate",
|
"expectorate",
|
||||||
"futures",
|
"futures",
|
||||||
|
"itertools 0.11.0",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"kittycad",
|
"kittycad",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
@ -968,6 +1117,7 @@ dependencies = [
|
|||||||
"thiserror",
|
"thiserror",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-tungstenite",
|
"tokio-tungstenite",
|
||||||
|
"tower-lsp",
|
||||||
"ts-rs-json-value",
|
"ts-rs-json-value",
|
||||||
"uuid",
|
"uuid",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
@ -985,7 +1135,7 @@ dependencies = [
|
|||||||
"bytes",
|
"bytes",
|
||||||
"chrono",
|
"chrono",
|
||||||
"data-encoding",
|
"data-encoding",
|
||||||
"itertools",
|
"itertools 0.10.5",
|
||||||
"parse-display",
|
"parse-display",
|
||||||
"phonenumber",
|
"phonenumber",
|
||||||
"schemars",
|
"schemars",
|
||||||
@ -1049,6 +1199,19 @@ dependencies = [
|
|||||||
"linked-hash-map",
|
"linked-hash-map",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "lsp-types"
|
||||||
|
version = "0.94.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags 1.3.2",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"serde_repr",
|
||||||
|
"url",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "memchr"
|
||||||
version = "2.5.0"
|
version = "2.5.0"
|
||||||
@ -1201,7 +1364,7 @@ dependencies = [
|
|||||||
"Inflector",
|
"Inflector",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"chrono",
|
"chrono",
|
||||||
"clap",
|
"clap 3.2.25",
|
||||||
"data-encoding",
|
"data-encoding",
|
||||||
"format_serde_error",
|
"format_serde_error",
|
||||||
"futures-util",
|
"futures-util",
|
||||||
@ -1332,7 +1495,7 @@ dependencies = [
|
|||||||
"bincode",
|
"bincode",
|
||||||
"either",
|
"either",
|
||||||
"fnv",
|
"fnv",
|
||||||
"itertools",
|
"itertools 0.10.5",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"nom",
|
"nom",
|
||||||
"quick-xml",
|
"quick-xml",
|
||||||
@ -1343,6 +1506,26 @@ dependencies = [
|
|||||||
"thiserror",
|
"thiserror",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pin-project"
|
||||||
|
version = "1.1.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
|
||||||
|
dependencies = [
|
||||||
|
"pin-project-internal",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pin-project-internal"
|
||||||
|
version = "1.1.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pin-project-lite"
|
name = "pin-project-lite"
|
||||||
version = "0.2.13"
|
version = "0.2.13"
|
||||||
@ -1848,6 +2031,17 @@ dependencies = [
|
|||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_repr"
|
||||||
|
version = "0.1.16"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_tokenstream"
|
name = "serde_tokenstream"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
@ -1940,9 +2134,9 @@ checksum = "8347046d4ebd943127157b94d63abb990fcf729dc4e9978927fdf4ac3c998d06"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "slog-async"
|
name = "slog-async"
|
||||||
version = "2.7.0"
|
version = "2.8.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "766c59b252e62a34651412870ff55d8c4e6d04df19b43eecb2703e417b097ffe"
|
checksum = "72c8038f898a2c79507940990f05386455b3a317d8f18d4caea7cbc3d5096b84"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"crossbeam-channel",
|
"crossbeam-channel",
|
||||||
"slog",
|
"slog",
|
||||||
@ -2320,6 +2514,61 @@ dependencies = [
|
|||||||
"walkdir",
|
"walkdir",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tower"
|
||||||
|
version = "0.4.13"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
|
||||||
|
dependencies = [
|
||||||
|
"futures-core",
|
||||||
|
"futures-util",
|
||||||
|
"pin-project",
|
||||||
|
"pin-project-lite",
|
||||||
|
"tower-layer",
|
||||||
|
"tower-service",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tower-layer"
|
||||||
|
version = "0.3.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tower-lsp"
|
||||||
|
version = "0.20.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508"
|
||||||
|
dependencies = [
|
||||||
|
"async-codec-lite",
|
||||||
|
"async-trait",
|
||||||
|
"auto_impl",
|
||||||
|
"bytes",
|
||||||
|
"dashmap",
|
||||||
|
"futures",
|
||||||
|
"httparse",
|
||||||
|
"lsp-types",
|
||||||
|
"memchr",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"tokio",
|
||||||
|
"tokio-util",
|
||||||
|
"tower",
|
||||||
|
"tower-lsp-macros",
|
||||||
|
"tracing",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tower-lsp-macros"
|
||||||
|
version = "0.9.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tower-service"
|
name = "tower-service"
|
||||||
version = "0.3.2"
|
version = "0.3.2"
|
||||||
@ -2334,9 +2583,21 @@ checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
|
"tracing-attributes",
|
||||||
"tracing-core",
|
"tracing-core",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tracing-attributes"
|
||||||
|
version = "0.1.26"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"syn 2.0.29",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "tracing-core"
|
name = "tracing-core"
|
||||||
version = "0.1.31"
|
version = "0.1.31"
|
||||||
@ -2363,10 +2624,11 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ts-rs-json-value"
|
name = "ts-rs-json-value"
|
||||||
version = "7.0.0"
|
version = "7.0.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b66d07e64e1e39d693819307757ad16878ff2be1f26d6fc2137c4e23bc0c0545"
|
checksum = "f7a6c8eccea9e885ef26336d58ef9ae48b22d7ae3e503422af1902240616d1f6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"schemars",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"thiserror",
|
"thiserror",
|
||||||
"ts-rs-macros",
|
"ts-rs-macros",
|
||||||
@ -2490,6 +2752,12 @@ version = "0.7.6"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
|
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "utf8parse"
|
||||||
|
version = "0.2.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "uuid"
|
name = "uuid"
|
||||||
version = "1.4.1"
|
version = "1.4.1"
|
||||||
@ -2570,6 +2838,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
|
checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
|
"futures-core",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"web-sys",
|
"web-sys",
|
||||||
@ -2609,12 +2878,30 @@ name = "wasm-lib"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bson",
|
"bson",
|
||||||
|
"futures",
|
||||||
"gloo-utils",
|
"gloo-utils",
|
||||||
|
"js-sys",
|
||||||
"kcl-lib",
|
"kcl-lib",
|
||||||
"kittycad",
|
"kittycad",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
|
"tower-lsp",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"wasm-bindgen-futures",
|
"wasm-bindgen-futures",
|
||||||
|
"wasm-streams",
|
||||||
|
"web-sys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasm-streams"
|
||||||
|
version = "0.3.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7"
|
||||||
|
dependencies = [
|
||||||
|
"futures-util",
|
||||||
|
"js-sys",
|
||||||
|
"wasm-bindgen",
|
||||||
|
"wasm-bindgen-futures",
|
||||||
|
"web-sys",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -16,6 +16,22 @@ serde_json = "1.0.93"
wasm-bindgen = "0.2.87"
wasm-bindgen-futures = "0.4.37"

+ [target.'cfg(target_arch = "wasm32")'.dependencies]
+ futures = "0.3.28"
+ js-sys = "0.3.64"
+ tower-lsp = { version = "0.20.0", default-features = false, features = ["runtime-agnostic"] }
+ wasm-bindgen-futures = { version = "0.4.37", features = ["futures-core-03-stream"] }
+ wasm-streams = "0.3.0"
+
+ [target.'cfg(target_arch = "wasm32")'.dependencies.web-sys]
+ version = "0.3.57"
+ features = [
+ "console",
+ "HtmlTextAreaElement",
+ "ReadableStream",
+ "WritableStream",
+ ]
+
[profile.release]
panic = "abort"
debug = true
@ -23,5 +39,5 @@ debug = true
[workspace]
members = [
"derive-docs",
- "kcl"
+ "kcl",
]
|
@ -1,7 +1,7 @@
[package]
name = "derive-docs"
description = "A tool for generating documentation from Rust derive macros"
- version = "0.1.0"
+ version = "0.1.3"
edition = "2021"
license = "MIT"

|
@ -195,7 +195,9 @@ fn do_stdlib_inner(
continue;
}
},
- };
+ }
+ .trim_start_matches('_')
+ .to_string();

let ty = match arg {
syn::FnArg::Receiver(pat) => pat.ty.as_ref().into_token_stream(),
@ -247,15 +249,21 @@ fn do_stdlib_inner(
.replace("-> ", "")
.replace("Result < ", "")
.replace(", KclError >", "");
- let ret_ty_string = ret_ty_string.trim().to_string();
- let ret_ty_ident = format_ident!("{}", ret_ty_string);
- let ret_ty_string = clean_type(&ret_ty_string);
- let return_type = quote! {
- #docs_crate::StdLibFnArg {
- name: "".to_string(),
- type_: #ret_ty_string.to_string(),
- schema: #ret_ty_ident::json_schema(&mut generator),
- required: true,
+ let return_type = if !ret_ty_string.is_empty() {
+ let ret_ty_string = ret_ty_string.trim().to_string();
+ let ret_ty_ident = format_ident!("{}", ret_ty_string);
+ let ret_ty_string = clean_type(&ret_ty_string);
+ quote! {
+ Some(#docs_crate::StdLibFnArg {
+ name: "".to_string(),
+ type_: #ret_ty_string.to_string(),
+ schema: #ret_ty_ident::json_schema(&mut generator),
+ required: true,
+ })
+ }
+ } else {
+ quote! {
+ None
}
};
@ -275,6 +283,8 @@ fn do_stdlib_inner(
// ... a struct type called `#name_ident` that has no members
#[allow(non_camel_case_types, missing_docs)]
#description_doc_comment
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, schemars::JsonSchema, ts_rs::TS)]
+ #[ts(export)]
pub(crate) struct #name_ident {}
// ... a constant of type `#name` whose identifier is also #name_ident
#[allow(non_upper_case_globals, missing_docs)]
@ -307,7 +317,7 @@ fn do_stdlib_inner(
vec![#(#arg_types),*]
}

- fn return_value(&self) -> #docs_crate::StdLibFnArg {
+ fn return_value(&self) -> Option<#docs_crate::StdLibFnArg> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
@ -326,6 +336,10 @@ fn do_stdlib_inner(
fn std_lib_fn(&self) -> crate::std::StdFn {
#fn_name_ident
}
+
+ fn clone_box(&self) -> Box<dyn #docs_crate::StdLibFn> {
+ Box::new(self.clone())
+ }
}

#item
@ -529,4 +543,25 @@ mod tests {
assert!(errors.is_empty());
expectorate::assert_contents("tests/min.gen", &openapitor::types::get_text_fmt(&item).unwrap());
}
+
+ #[test]
+ fn test_stdlib_show() {
+ let (item, errors) = do_stdlib(
+ quote! {
+ name = "show",
+ },
+ quote! {
+ fn inner_show(
+ /// The args to do shit to.
+ _args: Vec<f64>
+ ) {
+ }
+ },
+ )
+ .unwrap();
+ let _expected = quote! {};
+
+ assert!(errors.is_empty());
+ expectorate::assert_contents("tests/show.gen", &openapitor::types::get_text_fmt(&item).unwrap());
+ }
}
|
@ -1,5 +1,7 @@
#[allow(non_camel_case_types, missing_docs)]
#[doc = "Std lib function: lineTo"]
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, schemars :: JsonSchema, ts_rs :: TS)]
+ #[ts(export)]
pub(crate) struct LineTo {}

#[allow(non_upper_case_globals, missing_docs)]
@ -42,16 +44,16 @@ impl crate::docs::StdLibFn for LineTo {
]
}

- fn return_value(&self) -> crate::docs::StdLibFnArg {
+ fn return_value(&self) -> Option<crate::docs::StdLibFnArg> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
- crate::docs::StdLibFnArg {
+ Some(crate::docs::StdLibFnArg {
name: "".to_string(),
type_: "SketchGroup".to_string(),
schema: SketchGroup::json_schema(&mut generator),
required: true,
- }
+ })
}

fn unpublished(&self) -> bool {
@ -65,6 +67,10 @@ impl crate::docs::StdLibFn for LineTo {
fn std_lib_fn(&self) -> crate::std::StdFn {
line_to
}
+
+ fn clone_box(&self) -> Box<dyn crate::docs::StdLibFn> {
+ Box::new(self.clone())
+ }
}

fn inner_line_to(
|
@ -1,5 +1,7 @@
#[allow(non_camel_case_types, missing_docs)]
#[doc = "Std lib function: min"]
+ #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, schemars :: JsonSchema, ts_rs :: TS)]
+ #[ts(export)]
pub(crate) struct Min {}

#[allow(non_upper_case_globals, missing_docs)]
@ -34,16 +36,16 @@ impl crate::docs::StdLibFn for Min {
}]
}

- fn return_value(&self) -> crate::docs::StdLibFnArg {
+ fn return_value(&self) -> Option<crate::docs::StdLibFnArg> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
- crate::docs::StdLibFnArg {
+ Some(crate::docs::StdLibFnArg {
name: "".to_string(),
type_: "number".to_string(),
schema: f64::json_schema(&mut generator),
required: true,
- }
+ })
}

fn unpublished(&self) -> bool {
@ -57,6 +59,10 @@ impl crate::docs::StdLibFn for Min {
fn std_lib_fn(&self) -> crate::std::StdFn {
min
}
+
+ fn clone_box(&self) -> Box<dyn crate::docs::StdLibFn> {
+ Box::new(self.clone())
+ }
}

fn inner_min(#[doc = r" The args to do shit to."] args: Vec<f64>) -> f64 {
|
63
src/wasm-lib/derive-docs/tests/show.gen
Normal file
@ -0,0 +1,63 @@
#[allow(non_camel_case_types, missing_docs)]
#[doc = "Std lib function: show"]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, schemars :: JsonSchema, ts_rs :: TS)]
#[ts(export)]
pub(crate) struct Show {}

#[allow(non_upper_case_globals, missing_docs)]
#[doc = "Std lib function: show"]
pub(crate) const Show: Show = Show {};
impl crate::docs::StdLibFn for Show {
fn name(&self) -> String {
"show".to_string()
}

fn summary(&self) -> String {
"".to_string()
}

fn description(&self) -> String {
"".to_string()
}

fn tags(&self) -> Vec<String> {
vec![]
}

fn args(&self) -> Vec<crate::docs::StdLibFnArg> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
vec![crate::docs::StdLibFnArg {
name: "args".to_string(),
type_: "[number]".to_string(),
schema: <Vec<f64>>::json_schema(&mut generator),
required: true,
}]
}

fn return_value(&self) -> Option<crate::docs::StdLibFnArg> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
None
}

fn unpublished(&self) -> bool {
false
}

fn deprecated(&self) -> bool {
false
}

fn std_lib_fn(&self) -> crate::std::StdFn {
show
}

fn clone_box(&self) -> Box<dyn crate::docs::StdLibFn> {
Box::new(self.clone())
}
}

fn inner_show(#[doc = r" The args to do shit to."] _args: Vec<f64>) {}
|
@ -1,30 +1,34 @@
[package]
name = "kcl-lib"
description = "KittyCAD Language"
- version = "0.1.10"
+ version = "0.1.24"
edition = "2021"
license = "MIT"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
- anyhow = "1.0.75"
- derive-docs = { version = "0.1.0" }
+ anyhow = { version = "1.0.75", features = ["backtrace"] }
+ clap = { version = "4.4.2", features = ["cargo", "derive", "env", "unicode"] }
+ dashmap = "5.5.3"
+ derive-docs = { version = "0.1.3" }
+ #derive-docs = { path = "../derive-docs" }
kittycad = { version = "0.2.23", default-features = false, features = ["js"] }
lazy_static = "1.4.0"
parse-display = "0.8.2"
regex = "1.7.1"
- schemars = { version = "0.8", features = ["url", "uuid1"] }
+ schemars = { version = "0.8", features = ["impl_json_schema", "url", "uuid1"] }
serde = {version = "1.0.152", features = ["derive"] }
serde_json = "1.0.93"
thiserror = "1.0.47"
- ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "uuid-impl"] }
+ ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "schemars-impl", "uuid-impl"] }
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
- wasm-bindgen = "0.2.87"
- wasm-bindgen-futures = "0.4.37"

[target.'cfg(target_arch = "wasm32")'.dependencies]
js-sys = { version = "0.3.64" }
+ tower-lsp = { version = "0.20.0", default-features = false, features = ["runtime-agnostic"] }
+ wasm-bindgen = "0.2.87"
+ wasm-bindgen-futures = "0.4.37"

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
bson = { version = "2.7.0", features = ["uuid-1", "chrono"] }
@ -32,6 +36,7 @@ futures = { version = "0.3.28" }
reqwest = { version = "0.11.20", default-features = false }
tokio = { version = "1.32.0", features = ["full"] }
tokio-tungstenite = { version = "0.20.0", features = ["rustls-tls-native-roots"] }
+ tower-lsp = { version = "0.20.0", features = ["proposed"] }

[features]
default = ["engine"]
@ -43,5 +48,6 @@ debug = true

[dev-dependencies]
expectorate = "1.0.7"
+ itertools = "0.11.0"
pretty_assertions = "1.4.0"
tokio = { version = "1.32.0", features = ["rt-multi-thread", "macros", "time"] }
|
4
src/wasm-lib/kcl/fuzz/.gitignore
vendored
Normal file
@ -0,0 +1,4 @@
target
corpus
artifacts
coverage
2218
src/wasm-lib/kcl/fuzz/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
27
src/wasm-lib/kcl/fuzz/Cargo.toml
Normal file
@ -0,0 +1,27 @@
[package]
name = "kcl-lib-fuzz"
version = "0.0.0"
publish = false
edition = "2021"

[package.metadata]
cargo-fuzz = true

[dependencies]
libfuzzer-sys = "0.4"

[dependencies.kcl-lib]
path = ".."

# Prevent this from interfering with workspaces
[workspace]
members = ["."]

[profile.release]
debug = 1

[[bin]]
name = "parser"
path = "fuzz_targets/parser.rs"
test = false
doc = false
14
src/wasm-lib/kcl/fuzz/fuzz_targets/parser.rs
Normal file
@ -0,0 +1,14 @@
#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
extern crate kcl_lib;

fuzz_target!(|data: &[u8]| {
if let Ok(s) = std::str::from_utf8(data) {
let tokens = kcl_lib::tokeniser::lexer(s);
let parser = kcl_lib::parser::Parser::new(tokens);
if let Ok(_) = parser.ast() {
println!("OK");
}
}
});
File diff suppressed because it is too large
@@ -1,11 +1,18 @@
//! Functions for generating docs for our stdlib functions.

use anyhow::Result;
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use tower_lsp::lsp_types::{
+    CompletionItem, CompletionItemKind, CompletionItemLabelDetails, Documentation, InsertTextFormat, MarkupContent,
+    MarkupKind, ParameterInformation, ParameterLabel, SignatureHelp, SignatureInformation,
+};

use crate::std::Primitive;

-#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema, ts_rs::TS)]
+#[ts(export)]
+#[serde(rename_all = "camelCase")]
pub struct StdLibFnData {
    /// The name of the function.
    pub name: String,
@@ -18,7 +25,7 @@ pub struct StdLibFnData {
    /// The args of the function.
    pub args: Vec<StdLibFnArg>,
    /// The return value of the function.
-    pub return_value: StdLibFnArg,
+    pub return_value: Option<StdLibFnArg>,
    /// If the function is unpublished.
    pub unpublished: bool,
    /// If the function is deprecated.
@@ -26,7 +33,9 @@ pub struct StdLibFnData {
}

/// This struct defines a single argument to a stdlib function.
-#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, JsonSchema, ts_rs::TS)]
+#[ts(export)]
+#[serde(rename_all = "camelCase")]
pub struct StdLibFnArg {
    /// The name of the argument.
    pub name: String,
@@ -41,23 +50,36 @@ pub struct StdLibFnArg {
impl StdLibFnArg {
    #[allow(dead_code)]
    pub fn get_type_string(&self) -> Result<(String, bool)> {
-        get_type_string_from_schema(&self.schema)
+        get_type_string_from_schema(&self.schema.clone())
    }

    #[allow(dead_code)]
    pub fn get_autocomplete_string(&self) -> Result<String> {
-        get_autocomplete_string_from_schema(&self.schema)
+        get_autocomplete_string_from_schema(&self.schema.clone())
    }

-    #[allow(dead_code)]
    pub fn description(&self) -> Option<String> {
-        get_description_string_from_schema(&self.schema)
+        get_description_string_from_schema(&self.schema.clone())
+    }
+}
+
+impl From<StdLibFnArg> for ParameterInformation {
+    fn from(arg: StdLibFnArg) -> Self {
+        ParameterInformation {
+            label: ParameterLabel::Simple(arg.name.to_string()),
+            documentation: arg.description().map(|description| {
+                Documentation::MarkupContent(MarkupContent {
+                    kind: MarkupKind::Markdown,
+                    value: description,
+                })
+            }),
+        }
    }
}

/// This trait defines functions called upon stdlib functions to generate
/// documentation for them.
-pub trait StdLibFn {
+pub trait StdLibFn: std::fmt::Debug + Send + Sync {
    /// The name of the function.
    fn name(&self) -> String;

@@ -74,7 +96,7 @@ pub trait StdLibFn {
    fn args(&self) -> Vec<StdLibFnArg>;

    /// The return value of the function.
-    fn return_value(&self) -> StdLibFnArg;
+    fn return_value(&self) -> Option<StdLibFnArg>;

    /// If the function is unpublished.
    fn unpublished(&self) -> bool;
@@ -85,6 +107,9 @@ pub trait StdLibFn {
    /// The function itself.
    fn std_lib_fn(&self) -> crate::std::StdFn;

+    /// Helper function to clone the boxed trait object.
+    fn clone_box(&self) -> Box<dyn StdLibFn>;
+
    /// Return a JSON struct representing the function.
    fn to_json(&self) -> Result<StdLibFnData> {
        Ok(StdLibFnData {
@@ -108,11 +133,139 @@ pub trait StdLibFn {
            }
            signature.push_str(&format!("{}: {}", arg.name, arg.type_));
        }
-        signature.push_str(") -> ");
-        signature.push_str(&self.return_value().type_);
+        signature.push(')');
+        if let Some(return_value) = self.return_value() {
+            signature.push_str(&format!(" -> {}", return_value.type_));
+        }

        signature
    }

+    fn to_completion_item(&self) -> CompletionItem {
+        CompletionItem {
+            label: self.name(),
+            label_details: Some(CompletionItemLabelDetails {
+                detail: Some(self.fn_signature().replace(&self.name(), "")),
+                description: None,
+            }),
+            kind: Some(CompletionItemKind::FUNCTION),
+            detail: None,
+            documentation: Some(Documentation::MarkupContent(MarkupContent {
+                kind: MarkupKind::Markdown,
+                value: if !self.description().is_empty() {
+                    format!("{}\n\n{}", self.summary(), self.description())
+                } else {
+                    self.summary()
+                },
+            })),
+            deprecated: Some(self.deprecated()),
+            preselect: None,
+            sort_text: None,
+            filter_text: None,
+            insert_text: Some(format!(
+                "{}({})",
+                self.name(),
+                self.args()
+                    .iter()
+                    .enumerate()
+                    // It is okay to unwrap here since in the `kcl-lib` tests, we would have caught
+                    // any errors in the `self`'s signature.
+                    .map(|(index, item)| {
+                        let format = item.get_autocomplete_string().unwrap();
+                        if item.type_ == "SketchGroup" || item.type_ == "ExtrudeGroup" {
+                            format!("${{{}:{}}}", index + 1, "%")
+                        } else {
+                            format!("${{{}:{}}}", index + 1, format)
+                        }
+                    })
+                    .collect::<Vec<_>>()
+                    .join(",")
+            )),
+            insert_text_format: Some(InsertTextFormat::SNIPPET),
+            insert_text_mode: None,
+            text_edit: None,
+            additional_text_edits: None,
+            command: None,
+            commit_characters: None,
+            data: None,
+            tags: None,
+        }
+    }
+
+    fn to_signature_help(&self) -> SignatureHelp {
+        // Fill this in based on the current position of the cursor.
+        let active_parameter = None;
+
+        SignatureHelp {
+            signatures: vec![SignatureInformation {
+                label: self.name(),
+                documentation: Some(Documentation::MarkupContent(MarkupContent {
+                    kind: MarkupKind::Markdown,
+                    value: if !self.description().is_empty() {
+                        format!("{}\n\n{}", self.summary(), self.description())
+                    } else {
+                        self.summary()
+                    },
+                })),
+                parameters: Some(self.args().into_iter().map(|arg| arg.into()).collect()),
+                active_parameter,
+            }],
+            active_signature: Some(0),
+            active_parameter,
+        }
+    }
}

+impl JsonSchema for dyn StdLibFn {
+    fn schema_name() -> String {
+        "StdLibFn".to_string()
+    }
+
+    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
+        gen.subschema_for::<StdLibFnData>()
+    }
+}
+
+impl Serialize for Box<dyn StdLibFn> {
+    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
+        self.to_json().unwrap().serialize(serializer)
+    }
+}
+
+impl<'de> Deserialize<'de> for Box<dyn StdLibFn> {
+    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+        let data = StdLibFnData::deserialize(deserializer)?;
+        let stdlib = crate::std::StdLib::new();
+        let stdlib_fn = stdlib
+            .get(&data.name)
+            .ok_or_else(|| serde::de::Error::custom(format!("StdLibFn {} not found", data.name)))?;
+        Ok(stdlib_fn)
+    }
+}
+
+impl ts_rs::TS for dyn StdLibFn {
+    const EXPORT_TO: Option<&'static str> = Some("bindings/StdLibFnData");
+
+    fn name() -> String {
+        "StdLibFnData".to_string()
+    }
+
+    fn dependencies() -> Vec<ts_rs::Dependency>
+    where
+        Self: 'static,
+    {
+        StdLibFnData::dependencies()
+    }
+
+    fn transparent() -> bool {
+        StdLibFnData::transparent()
+    }
+}
+
+impl Clone for Box<dyn StdLibFn> {
+    fn clone(&self) -> Box<dyn StdLibFn> {
+        self.clone_box()
+    }
+}

pub fn get_description_string_from_schema(schema: &schemars::schema::Schema) -> Option<String> {
@@ -152,11 +305,7 @@ pub fn get_type_string_from_schema(schema: &schemars::schema::Schema) -> Result<
            if let Some(description) = get_description_string_from_schema(prop) {
                fn_docs.push_str(&format!("\t// {}\n", description));
            }
-            fn_docs.push_str(&format!(
-                "\t\"{}\": {},\n",
-                prop_name,
-                get_type_string_from_schema(prop)?.0,
-            ));
+            fn_docs.push_str(&format!("\t{}: {},\n", prop_name, get_type_string_from_schema(prop)?.0,));
        }

        fn_docs.push('}');
@@ -234,7 +383,7 @@ pub fn get_autocomplete_string_from_schema(schema: &schemars::schema::Schema) ->
                fn_docs.push_str(&format!("\t// {}\n", description));
            }
            fn_docs.push_str(&format!(
-                "\t\"{}\": {},\n",
+                "\t{}: {},\n",
                prop_name,
                get_autocomplete_string_from_schema(prop)?,
            ));
@@ -282,3 +431,93 @@ pub fn get_autocomplete_string_from_schema(schema: &schemars::schema::Schema) ->
        schemars::schema::Schema::Bool(_) => Ok(Primitive::Bool.to_string()),
    }
}

+pub fn completion_item_from_enum_schema(
+    schema: &schemars::schema::Schema,
+    kind: CompletionItemKind,
+) -> Result<CompletionItem> {
+    // Get the docs for the schema.
+    let description = get_description_string_from_schema(schema).unwrap_or_default();
+    let schemars::schema::Schema::Object(o) = schema else {
+        anyhow::bail!("expected object schema: {:#?}", schema);
+    };
+    let Some(enum_values) = o.enum_values.as_ref() else {
+        anyhow::bail!("expected enum values: {:#?}", o);
+    };
+
+    if enum_values.len() > 1 {
+        anyhow::bail!("expected only one enum value: {:#?}", o);
+    }
+
+    if enum_values.is_empty() {
+        anyhow::bail!("expected at least one enum value: {:#?}", o);
+    }
+
+    let label = enum_values[0].to_string();
+
+    Ok(CompletionItem {
+        label,
+        label_details: None,
+        kind: Some(kind),
+        detail: Some(description.to_string()),
+        documentation: Some(Documentation::MarkupContent(MarkupContent {
+            kind: MarkupKind::Markdown,
+            value: description.to_string(),
+        })),
+        deprecated: Some(false),
+        preselect: None,
+        sort_text: None,
+        filter_text: None,
+        insert_text: None,
+        insert_text_format: None,
+        insert_text_mode: None,
+        text_edit: None,
+        additional_text_edits: None,
+        command: None,
+        commit_characters: None,
+        data: None,
+        tags: None,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use pretty_assertions::assert_eq;
+
+    #[test]
+    fn test_serialize_function() {
+        let some_function = crate::abstract_syntax_tree_types::Function::StdLib {
+            func: Box::new(crate::std::sketch::Line),
+        };
+        let serialized = serde_json::to_string(&some_function).unwrap();
+        assert!(serialized.contains(r#"{"type":"StdLib""#));
+    }
+
+    #[test]
+    fn test_deserialize_function() {
+        let some_function_string = r#"{"type":"StdLib","func":{"name":"line","summary":"","description":"","tags":[],"returnValue":{"type":"","required":false,"name":"","schema":{}},"args":[],"unpublished":false,"deprecated":false}}"#;
+        let some_function: crate::abstract_syntax_tree_types::Function =
+            serde_json::from_str(some_function_string).unwrap();
+
+        assert_eq!(
+            some_function,
+            crate::abstract_syntax_tree_types::Function::StdLib {
+                func: Box::new(crate::std::sketch::Line),
+            }
+        );
+    }
+
+    #[test]
+    fn test_deserialize_function_show() {
+        let some_function_string = r#"{"type":"StdLib","func":{"name":"show","summary":"","description":"","tags":[],"returnValue":{"type":"","required":false,"name":"","schema":{}},"args":[],"unpublished":false,"deprecated":false}}"#;
+        let some_function: crate::abstract_syntax_tree_types::Function =
+            serde_json::from_str(some_function_string).unwrap();
+
+        assert_eq!(
+            some_function,
+            crate::abstract_syntax_tree_types::Function::StdLib {
+                func: Box::new(crate::std::Show),
+            }
+        );
+    }
+}
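For orientation, `to_completion_item` above emits LSP snippet text, and `fn_signature` now only appends a return type when one exists. A hypothetical illustration (the function name and placeholder values are invented for the example, not taken from this change):

// Hypothetical: for a stdlib function `line(data: LineData, sketch: SketchGroup) -> SketchGroup`,
// the completion's insert_text would look roughly like:
const EXAMPLE_INSERT_TEXT: &str = "line(${1:{\"to\": [0.0, 0.0]}},${2:%})";
// `${1:...}` / `${2:...}` are snippet tab stops (InsertTextFormat::SNIPPET);
// SketchGroup/ExtrudeGroup arguments are pre-filled with `%` so they drop into a pipe expression.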
@@ -1,5 +1,8 @@
//! Functions for managing engine communications.

+#[cfg(target_arch = "wasm32")]
+#[cfg(not(test))]
+#[cfg(feature = "engine")]
use wasm_bindgen::prelude::*;

#[cfg(not(target_arch = "wasm32"))]
@@ -32,19 +35,18 @@ pub mod conn_mock;
#[cfg(not(test))]
pub use conn_mock::EngineConnection;

-use crate::executor::SourceRange;
+#[cfg(target_arch = "wasm32")]
+#[cfg(not(test))]
#[derive(Debug)]
#[wasm_bindgen]
pub struct EngineManager {
    connection: EngineConnection,
}

+#[cfg(target_arch = "wasm32")]
+#[cfg(not(test))]
+#[cfg(feature = "engine")]
#[wasm_bindgen]
impl EngineManager {
-    #[cfg(target_arch = "wasm32")]
-    #[cfg(not(test))]
-    #[cfg(feature = "engine")]
    #[wasm_bindgen(constructor)]
    pub async fn new(manager: conn_wasm::EngineCommandManager) -> EngineManager {
        EngineManager {
@@ -57,7 +59,7 @@ impl EngineManager {
        let id = uuid::Uuid::parse_str(id_str).map_err(|e| e.to_string())?;
        let cmd = serde_json::from_str(cmd_str).map_err(|e| e.to_string())?;
        self.connection
-            .send_modeling_cmd(id, SourceRange::default(), cmd)
+            .send_modeling_cmd(id, crate::executor::SourceRange::default(), cmd)
            .map_err(String::from)?;

        Ok(())
@@ -1,5 +1,8 @@
use serde::{Deserialize, Serialize};
use thiserror::Error;
+use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
+
+use crate::executor::SourceRange;

#[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS)]
#[ts(export)]
@@ -29,7 +32,7 @@ pub enum KclError {
#[ts(export)]
pub struct KclErrorDetails {
    #[serde(rename = "sourceRanges")]
-    pub source_ranges: Vec<crate::executor::SourceRange>,
+    pub source_ranges: Vec<SourceRange>,
    #[serde(rename = "msg")]
    pub message: String,
}
@@ -61,6 +64,37 @@ impl KclError {

        (format!("{}: {}", type_, message), line, column)
    }

+    pub fn source_ranges(&self) -> Vec<SourceRange> {
+        match &self {
+            KclError::Syntax(e) => e.source_ranges.clone(),
+            KclError::Semantic(e) => e.source_ranges.clone(),
+            KclError::Type(e) => e.source_ranges.clone(),
+            KclError::Unimplemented(e) => e.source_ranges.clone(),
+            KclError::Unexpected(e) => e.source_ranges.clone(),
+            KclError::ValueAlreadyDefined(e) => e.source_ranges.clone(),
+            KclError::UndefinedValue(e) => e.source_ranges.clone(),
+            KclError::InvalidExpression(e) => e.source_ranges.clone(),
+            KclError::Engine(e) => e.source_ranges.clone(),
+        }
+    }
+
+    pub fn to_lsp_diagnostic(&self, code: &str) -> Diagnostic {
+        let (message, _, _) = self.get_message_line_column(code);
+        let source_ranges = self.source_ranges();
+
+        Diagnostic {
+            range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
+            severity: Some(DiagnosticSeverity::ERROR),
+            code: None,
+            // TODO: this is neat we can pass a URL to a help page here for this specific error.
+            code_description: None,
+            source: Some("kcl".to_string()),
+            message,
+            related_information: None,
+            tags: None,
+            data: None,
+        }
+    }
}

/// This is different than to_string() in that it will serialize the Error
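A small sketch of how the new helpers compose in practice (module paths assumed from this diff and the parser API above; this is illustrative and not part of the change itself):

// Sketch: turn a parse failure into an LSP diagnostic for the editor.
fn diagnostics_for(code: &str) -> Vec<tower_lsp::lsp_types::Diagnostic> {
    let tokens = kcl_lib::tokeniser::lexer(code);
    let parser = kcl_lib::parser::Parser::new(tokens);
    match parser.ast() {
        Ok(_) => vec![],
        // to_lsp_diagnostic maps the error's first SourceRange onto an LSP range in `code`.
        Err(e) => vec![e.to_lsp_diagnostic(code)],
    }
}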
@@ -5,9 +5,10 @@ use std::collections::HashMap;
use anyhow::Result;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
+use tower_lsp::lsp_types::{Position as LspPosition, Range as LspRange};

use crate::{
-    abstract_syntax_tree_types::{BodyItem, FunctionExpression, Value},
+    abstract_syntax_tree_types::{BodyItem, Function, FunctionExpression, Value},
    engine::EngineConnection,
    errors::{KclError, KclErrorDetails},
};
@@ -281,10 +282,65 @@ pub struct Position(pub [f64; 3]);
#[ts(export)]
pub struct Rotation(pub [f64; 4]);

-#[derive(Debug, Default, Deserialize, Serialize, PartialEq, Copy, Clone, ts_rs::TS, JsonSchema)]
+#[derive(Debug, Default, Deserialize, Serialize, PartialEq, Copy, Clone, ts_rs::TS, JsonSchema, Hash, Eq)]
#[ts(export)]
pub struct SourceRange(pub [usize; 2]);

+impl SourceRange {
+    /// Create a new source range.
+    pub fn new(start: usize, end: usize) -> Self {
+        Self([start, end])
+    }
+
+    /// Get the start of the range.
+    pub fn start(&self) -> usize {
+        self.0[0]
+    }
+
+    /// Get the end of the range.
+    pub fn end(&self) -> usize {
+        self.0[1]
+    }
+
+    /// Check if the range contains a position.
+    pub fn contains(&self, pos: usize) -> bool {
+        pos >= self.start() && pos <= self.end()
+    }
+
+    pub fn start_to_lsp_position(&self, code: &str) -> LspPosition {
+        // Calculate the line and column of the error from the source range.
+        // Lines are zero indexed in vscode so we need to subtract 1.
+        let mut line = code[..self.start()].lines().count();
+        if line > 0 {
+            line = line.saturating_sub(1);
+        }
+        let column = code[..self.start()].lines().last().map(|l| l.len()).unwrap_or_default();
+
+        LspPosition {
+            line: line as u32,
+            character: column as u32,
+        }
+    }
+
+    pub fn end_to_lsp_position(&self, code: &str) -> LspPosition {
+        // Calculate the line and column of the error from the source range.
+        // Lines are zero indexed in vscode so we need to subtract 1.
+        let line = code[..self.end()].lines().count() - 1;
+        let column = code[..self.end()].lines().last().map(|l| l.len()).unwrap_or_default();
+
+        LspPosition {
+            line: line as u32,
+            character: column as u32,
+        }
+    }
+
+    pub fn to_lsp_range(&self, code: &str) -> LspRange {
+        let start = self.start_to_lsp_position(code);
+        let end = self.end_to_lsp_position(code);
+        LspRange { start, end }
+    }
+}

#[derive(Debug, Deserialize, Serialize, PartialEq, Clone, ts_rs::TS, JsonSchema)]
#[ts(export)]
pub struct Point2d {
@@ -509,7 +565,6 @@ pub fn execute(
    engine: &mut EngineConnection,
) -> Result<ProgramMemory, KclError> {
    let mut pipe_info = PipeInfo::default();
-    let stdlib = crate::std::StdLib::new();

    // Iterate over the body of the program.
    for statement in &program.body {
@@ -529,7 +584,8 @@ pub fn execute(
                _ => (),
            }
        }
-        if fn_name == "show" {
+        let _show_fn = Box::new(crate::std::Show);
+        if let Function::StdLib { func: _show_fn } = &call_expr.function {
            if options != BodyType::Root {
                return Err(KclError::Semantic(KclErrorDetails {
                    message: "Cannot call show outside of a root".to_string(),
@@ -563,7 +619,7 @@ pub fn execute(
                    memory.add(&var_name, value.clone(), source_range)?;
                }
                Value::BinaryExpression(binary_expression) => {
-                    let result = binary_expression.get_result(memory, &mut pipe_info, &stdlib, engine)?;
+                    let result = binary_expression.get_result(memory, &mut pipe_info, engine)?;
                    memory.add(&var_name, result, source_range)?;
                }
                Value::FunctionExpression(function_expression) => {
@@ -586,7 +642,7 @@ pub fn execute(
                    for (index, param) in function_expression.params.iter().enumerate() {
                        fn_memory.add(
                            &param.name,
-                            args.clone().get(index).unwrap().clone(),
+                            args.get(index).unwrap().clone(),
                            param.into(),
                        )?;
                    }
@@ -600,11 +656,11 @@ pub fn execute(
                    )?;
                }
                Value::CallExpression(call_expression) => {
-                    let result = call_expression.execute(memory, &mut pipe_info, &stdlib, engine)?;
+                    let result = call_expression.execute(memory, &mut pipe_info, engine)?;
                    memory.add(&var_name, result, source_range)?;
                }
                Value::PipeExpression(pipe_expression) => {
-                    let result = pipe_expression.get_result(memory, &mut pipe_info, &stdlib, engine)?;
+                    let result = pipe_expression.get_result(memory, &mut pipe_info, engine)?;
                    memory.add(&var_name, result, source_range)?;
                }
                Value::PipeSubstitution(pipe_substitution) => {
@@ -617,11 +673,11 @@ pub fn execute(
                    }));
                }
                Value::ArrayExpression(array_expression) => {
-                    let result = array_expression.execute(memory, &mut pipe_info, &stdlib, engine)?;
+                    let result = array_expression.execute(memory, &mut pipe_info, engine)?;
                    memory.add(&var_name, result, source_range)?;
                }
                Value::ObjectExpression(object_expression) => {
-                    let result = object_expression.execute(memory, &mut pipe_info, &stdlib, engine)?;
+                    let result = object_expression.execute(memory, &mut pipe_info, engine)?;
                    memory.add(&var_name, result, source_range)?;
                }
                Value::MemberExpression(member_expression) => {
@@ -629,7 +685,7 @@ pub fn execute(
                    memory.add(&var_name, result, source_range)?;
                }
                Value::UnaryExpression(unary_expression) => {
-                    let result = unary_expression.get_result(memory, &mut pipe_info, &stdlib, engine)?;
+                    let result = unary_expression.get_result(memory, &mut pipe_info, engine)?;
                    memory.add(&var_name, result, source_range)?;
                }
            }
@@ -637,7 +693,7 @@ pub fn execute(
        }
        BodyItem::ReturnStatement(return_statement) => match &return_statement.argument {
            Value::BinaryExpression(bin_expr) => {
-                let result = bin_expr.get_result(memory, &mut pipe_info, &stdlib, engine)?;
+                let result = bin_expr.get_result(memory, &mut pipe_info, engine)?;
                memory.return_ = Some(ProgramReturn::Value(result));
            }
            Value::Identifier(identifier) => {
@@ -660,7 +716,8 @@ mod tests {

    pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
        let tokens = crate::tokeniser::lexer(code);
-        let program = crate::parser::abstract_syntax_tree(&tokens)?;
+        let parser = crate::parser::Parser::new(tokens);
+        let program = parser.ast()?;
        let mut mem: ProgramMemory = Default::default();
        let mut engine = EngineConnection::new().await?;
        let memory = execute(program, &mut mem, BodyType::Root, &mut engine)?;
@@ -773,6 +830,28 @@ const part001 = startSketchAt([0, 0])
    legLen(segLen('seg01', %), myVar)
  ], %)

+show(part001)"#;
+
+        parse_execute(ast).await.unwrap();
+    }
+
+    #[tokio::test(flavor = "multi_thread")]
+    async fn test_execute_with_inline_comment() {
+        let ast = r#"const baseThick = 1
+const armAngle = 60
+
+const baseThickHalf = baseThick / 2
+const halfArmAngle = armAngle / 2
+
+const arrExpShouldNotBeIncluded = [1, 2, 3]
+const objExpShouldNotBeIncluded = { a: 1, b: 2, c: 3 }
+
+const part001 = startSketchAt([0, 0])
+  |> yLineTo(1, %)
+  |> xLine(3.84, %) // selection-range-7ish-before-this
+
+const variableBelowShouldNotBeIncluded = 3
+
show(part001)"#;

        parse_execute(ast).await.unwrap();
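To make the new SourceRange-to-LSP conversion concrete, here is a worked example under the assumption that the paths above are public as shown (illustrative only, not part of the change):

// SourceRange stores [start, end] byte offsets; LSP positions are zero-indexed.
fn lsp_position_example() {
    let code = "const x = 1\nconst y = 2";
    // Offsets 14..19 sit inside `const y` on the second line.
    let range = kcl_lib::executor::SourceRange::new(14, 19);
    let start = range.start_to_lsp_position(code);
    // code[..14] spans two lines, and lines are zero-indexed, so line = 1;
    // the trailing partial line "co" has length 2, so character = 2.
    assert_eq!((start.line, start.character), (1, 2));
}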
@@ -5,6 +5,6 @@ pub mod errors;
pub mod executor;
pub mod math_parser;
pub mod parser;
-pub mod recast;
+pub mod server;
pub mod std;
pub mod tokeniser;
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,347 +0,0 @@
//! Generates source code from the AST.
//! The inverse of parsing (which generates an AST from the source code)

use crate::abstract_syntax_tree_types::{
    ArrayExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, FunctionExpression, Literal,
    LiteralIdentifier, MemberExpression, MemberObject, ObjectExpression, PipeExpression, Program, UnaryExpression,
    Value,
};

fn recast_literal(literal: Literal) -> String {
    if let serde_json::Value::String(value) = literal.value {
        let quote = if literal.raw.trim().starts_with('"') { '"' } else { '\'' };
        format!("{}{}{}", quote, value, quote)
    } else {
        literal.value.to_string()
    }
}

fn precedence(operator: &str) -> u8 {
    match operator {
        "+" | "-" => 11,
        "*" | "/" | "%" => 12,
        _ => 0,
    }
}

fn recast_binary_expression(expression: BinaryExpression) -> String {
    let maybe_wrap_it = |a: String, doit: bool| -> String {
        if doit {
            format!("({})", a)
        } else {
            a
        }
    };

    let should_wrap_right = match expression.right.clone() {
        BinaryPart::BinaryExpression(bin_exp) => {
            precedence(&expression.operator) > precedence(&bin_exp.operator) || expression.operator == "-"
        }
        _ => false,
    };

    let should_wrap_left = match expression.left.clone() {
        BinaryPart::BinaryExpression(bin_exp) => precedence(&expression.operator) > precedence(&bin_exp.operator),
        _ => false,
    };

    format!(
        "{} {} {}",
        maybe_wrap_it(recast_binary_part(expression.left), should_wrap_left),
        expression.operator,
        maybe_wrap_it(recast_binary_part(expression.right), should_wrap_right)
    )
}

fn recast_binary_part(part: BinaryPart) -> String {
    match part {
        BinaryPart::Literal(literal) => recast_literal(*literal),
        BinaryPart::Identifier(identifier) => identifier.name,
        BinaryPart::BinaryExpression(binary_expression) => recast_binary_expression(*binary_expression),
        BinaryPart::CallExpression(call_expression) => recast_call_expression(&call_expression, "", false),
        _ => String::new(),
    }
}

fn recast_value(node: Value, _indentation: String, is_in_pipe_expression: bool) -> String {
    let indentation = _indentation + if is_in_pipe_expression { " " } else { "" };
    match node {
        Value::BinaryExpression(bin_exp) => recast_binary_expression(*bin_exp),
        Value::ArrayExpression(array_exp) => recast_array_expression(&array_exp, &indentation),
        Value::ObjectExpression(ref obj_exp) => recast_object_expression(obj_exp, &indentation, is_in_pipe_expression),
        Value::MemberExpression(mem_exp) => recast_member_expression(*mem_exp),
        Value::Literal(literal) => recast_literal(*literal),
        Value::FunctionExpression(func_exp) => recast_function(*func_exp),
        Value::CallExpression(call_exp) => recast_call_expression(&call_exp, &indentation, is_in_pipe_expression),
        Value::Identifier(ident) => ident.name,
        Value::PipeExpression(pipe_exp) => recast_pipe_expression(&pipe_exp),
        Value::UnaryExpression(unary_exp) => recast_unary_expression(*unary_exp),
        _ => String::new(),
    }
}

fn recast_array_expression(expression: &ArrayExpression, indentation: &str) -> String {
    let flat_recast = format!(
        "[{}]",
        expression
            .elements
            .iter()
            .map(|el| recast_value(el.clone(), String::new(), false))
            .collect::<Vec<String>>()
            .join(", ")
    );
    let max_array_length = 40;
    if flat_recast.len() > max_array_length {
        let _indentation = indentation.to_string() + " ";
        format!(
            "[\n{}{}\n{}]",
            _indentation,
            expression
                .elements
                .iter()
                .map(|el| recast_value(el.clone(), _indentation.clone(), false))
                .collect::<Vec<String>>()
                .join(format!(",\n{}", _indentation).as_str()),
            indentation
        )
    } else {
        flat_recast
    }
}

fn recast_object_expression(expression: &ObjectExpression, indentation: &str, is_in_pipe_expression: bool) -> String {
    let flat_recast = format!(
        "{{ {} }}",
        expression
            .properties
            .iter()
            .map(|prop| {
                format!(
                    "{}: {}",
                    prop.key.name,
                    recast_value(prop.value.clone(), String::new(), false)
                )
            })
            .collect::<Vec<String>>()
            .join(", ")
    );
    let max_array_length = 40;
    if flat_recast.len() > max_array_length {
        let _indentation = indentation.to_owned() + " ";
        format!(
            "{{\n{}{}\n{}}}",
            _indentation,
            expression
                .properties
                .iter()
                .map(|prop| {
                    format!(
                        "{}: {}",
                        prop.key.name,
                        recast_value(prop.value.clone(), _indentation.clone(), is_in_pipe_expression)
                    )
                })
                .collect::<Vec<String>>()
                .join(format!(",\n{}", _indentation).as_str()),
            if is_in_pipe_expression { " " } else { "" }
        )
    } else {
        flat_recast
    }
}

fn recast_call_expression(expression: &CallExpression, indentation: &str, is_in_pipe_expression: bool) -> String {
    format!(
        "{}({})",
        expression.callee.name,
        expression
            .arguments
            .iter()
            .map(|arg| recast_argument(arg.clone(), indentation, is_in_pipe_expression))
            .collect::<Vec<String>>()
            .join(", ")
    )
}

fn recast_argument(argument: Value, indentation: &str, is_in_pipe_expression: bool) -> String {
    match argument {
        Value::Literal(literal) => recast_literal(*literal),
        Value::Identifier(identifier) => identifier.name,
        Value::BinaryExpression(binary_exp) => recast_binary_expression(*binary_exp),
        Value::ArrayExpression(array_exp) => recast_array_expression(&array_exp, indentation),
        Value::ObjectExpression(object_exp) => {
            recast_object_expression(&object_exp, indentation, is_in_pipe_expression)
        }
        Value::CallExpression(call_exp) => recast_call_expression(&call_exp, indentation, is_in_pipe_expression),
        Value::FunctionExpression(function_exp) => recast_function(*function_exp),
        Value::PipeSubstitution(_) => "%".to_string(),
        Value::UnaryExpression(unary_exp) => recast_unary_expression(*unary_exp),
        _ => String::new(),
    }
}

fn recast_member_expression(expression: MemberExpression) -> String {
    let key_str = match expression.property {
        LiteralIdentifier::Identifier(identifier) => {
            if expression.computed {
                format!("[{}]", &(*identifier.name))
            } else {
                format!(".{}", &(*identifier.name))
            }
        }
        LiteralIdentifier::Literal(lit) => format!("[{}]", &(*lit.raw)),
    };

    match expression.object {
        MemberObject::MemberExpression(member_exp) => recast_member_expression(*member_exp) + key_str.as_str(),
        MemberObject::Identifier(identifier) => identifier.name + key_str.as_str(),
    }
}

fn recast_pipe_expression(expression: &PipeExpression) -> String {
    expression
        .body
        .iter()
        .enumerate()
        .map(|(index, statement)| {
            let mut indentation = " ".to_string();
            let mut maybe_line_break = "\n".to_string();
            let mut str = recast_value(statement.clone(), indentation.clone(), true);
            let non_code_meta = expression.non_code_meta.clone();
            if let Some(non_code_meta_value) = non_code_meta.none_code_nodes.get(&index) {
                if non_code_meta_value.value != " " {
                    str += non_code_meta_value.value.as_str();
                    indentation = String::new();
                    maybe_line_break = String::new();
                }
            }

            if index != expression.body.len() - 1 {
                str += maybe_line_break.as_str();
                str += indentation.as_str();
                str += "|> ".to_string().as_str();
            }
            str
        })
        .collect::<String>()
}

fn recast_unary_expression(expression: UnaryExpression) -> String {
    let bin_part_val = match expression.argument {
        BinaryPart::Literal(literal) => Value::Literal(literal),
        BinaryPart::Identifier(identifier) => Value::Identifier(identifier),
        BinaryPart::BinaryExpression(binary_expression) => Value::BinaryExpression(binary_expression),
        BinaryPart::CallExpression(call_expression) => Value::CallExpression(call_expression),
        BinaryPart::UnaryExpression(unary_expression) => Value::UnaryExpression(unary_expression),
    };
    format!(
        "{}{}",
        expression.operator,
        recast_value(bin_part_val, String::new(), false)
    )
}

pub fn recast(ast: &Program, indentation: &str, is_with_block: bool) -> String {
    ast.body
        .iter()
        .map(|statement| match statement.clone() {
            BodyItem::ExpressionStatement(expression_statement) => match expression_statement.expression {
                Value::BinaryExpression(binary_expression) => recast_binary_expression(*binary_expression),
                Value::ArrayExpression(array_expression) => recast_array_expression(&array_expression, ""),
                Value::ObjectExpression(object_expression) => recast_object_expression(&object_expression, "", false),
                Value::CallExpression(call_expression) => recast_call_expression(&call_expression, "", false),
                _ => "Expression".to_string(),
            },
            BodyItem::VariableDeclaration(variable_declaration) => variable_declaration
                .declarations
                .iter()
                .map(|declaration| {
                    format!(
                        "{} {} = {}",
                        variable_declaration.kind,
                        declaration.id.name,
                        recast_value(declaration.init.clone(), String::new(), false)
                    )
                })
                .collect::<String>(),
            BodyItem::ReturnStatement(return_statement) => {
                format!("return {}", recast_argument(return_statement.argument, "", false))
            }
        })
        .enumerate()
        .map(|(index, recast_str)| {
            let is_legit_custom_whitespace_or_comment = |str: String| str != " " && str != "\n" && str != " ";

            // determine the value of startString
            let last_white_space_or_comment = if index > 0 {
                let tmp = if let Some(non_code_node) = ast.non_code_meta.none_code_nodes.get(&(index - 1)) {
                    non_code_node.value.clone()
                } else {
                    " ".to_string()
                };
                tmp
            } else {
                " ".to_string()
            };
            // indentation of this line will be covered by the previous if we're using a custom whitespace or comment
            let mut start_string = if is_legit_custom_whitespace_or_comment(last_white_space_or_comment) {
                String::new()
            } else {
                indentation.to_owned()
            };
            if index == 0 {
                if let Some(start) = ast.non_code_meta.start.clone() {
                    start_string = start.value;
                } else {
                    start_string = indentation.to_owned();
                }
            }
            if start_string.ends_with('\n') {
                start_string += indentation;
            }

            // determine the value of endString
            let maybe_line_break: String = if index == ast.body.len() - 1 && !is_with_block {
                String::new()
            } else {
                "\n".to_string()
            };
            let mut custom_white_space_or_comment = match ast.non_code_meta.none_code_nodes.get(&index) {
                Some(custom_white_space_or_comment) => custom_white_space_or_comment.value.clone(),
                None => String::new(),
            };
            if !is_legit_custom_whitespace_or_comment(custom_white_space_or_comment.clone()) {
                custom_white_space_or_comment = String::new();
            }
            let end_string = if custom_white_space_or_comment.is_empty() {
                maybe_line_break
            } else {
                custom_white_space_or_comment
            };

            format!("{}{}{}", start_string, recast_str, end_string)
        })
        .collect::<String>()
}

pub fn recast_function(expression: FunctionExpression) -> String {
    format!(
        "({}) => {{{}}}",
        expression
            .params
            .iter()
            .map(|param| param.name.clone())
            .collect::<Vec<String>>()
            .join(", "),
        recast(
            &Program {
                start: expression.body.start,
                end: expression.body.start,
                body: expression.body.body,
                non_code_meta: expression.body.non_code_meta
            },
            "",
            true
        )
    )
}
672 src/wasm-lib/kcl/src/server/mod.rs Normal file
@@ -0,0 +1,672 @@
//! Functions for the `kcl` lsp server.

use std::collections::HashMap;

use anyhow::Result;
use clap::Parser;
use dashmap::DashMap;
use tower_lsp::{jsonrpc::Result as RpcResult, lsp_types::*, Client, LanguageServer};

use crate::{abstract_syntax_tree_types::VariableKind, executor::SourceRange, parser::PIPE_OPERATOR};

/// A subcommand for running the server.
#[derive(Parser, Clone, Debug)]
pub struct Server {
    /// Port that the server should listen
    #[clap(long, default_value = "8080")]
    pub socket: i32,

    /// Listen over stdin and stdout instead of a tcp socket.
    #[clap(short, long, default_value = "false")]
    pub stdio: bool,
}

/// The lsp server backend.
pub struct Backend {
    /// The client for the backend.
    pub client: Client,
    /// The stdlib completions for the language.
    pub stdlib_completions: HashMap<String, CompletionItem>,
    /// The stdlib signatures for the language.
    pub stdlib_signatures: HashMap<String, SignatureHelp>,
    /// The types of tokens the server supports.
    pub token_types: Vec<SemanticTokenType>,
    /// Token maps.
    pub token_map: DashMap<String, Vec<crate::tokeniser::Token>>,
    /// AST maps.
    pub ast_map: DashMap<String, crate::abstract_syntax_tree_types::Program>,
    /// Current code.
    pub current_code_map: DashMap<String, String>,
    /// Diagnostics.
    pub diagnostics_map: DashMap<String, DocumentDiagnosticReport>,
    /// Symbols map.
    pub symbols_map: DashMap<String, Vec<DocumentSymbol>>,
    /// Semantic tokens map.
    pub semantic_tokens_map: DashMap<String, Vec<SemanticToken>>,
}

impl Backend {
    fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<usize> {
        self.token_types.iter().position(|x| *x == token_type)
    }

    async fn on_change(&self, params: TextDocumentItem) {
        // Lets update the tokens.
        self.current_code_map
            .insert(params.uri.to_string(), params.text.clone());
        let tokens = crate::tokeniser::lexer(&params.text);
        self.token_map.insert(params.uri.to_string(), tokens.clone());

        // Update the semantic tokens map.
        let mut semantic_tokens = vec![];
        let mut last_position = Position::new(0, 0);
        for token in &tokens {
            let Ok(mut token_type) = SemanticTokenType::try_from(token.token_type) else {
                // We continue here because not all tokens can be converted this way, we will get
                // the rest from the ast.
                continue;
            };

            if token.token_type == crate::tokeniser::TokenType::Word
                && self.stdlib_completions.contains_key(&token.value)
            {
                // This is a stdlib function.
                token_type = SemanticTokenType::FUNCTION;
            }

            let token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
                Some(index) => index,
                // This is actually bad this should not fail.
                // TODO: ensure we never get here.
                None => {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("token type `{:?}` not accounted for", token_type),
                        )
                        .await;
                    continue;
                }
            };

            let source_range: SourceRange = token.clone().into();
            let position = source_range.start_to_lsp_position(&params.text);

            let semantic_token = SemanticToken {
                delta_line: position.line - last_position.line,
                delta_start: if position.line != last_position.line {
                    position.character
                } else {
                    position.character - last_position.character
                },
                length: token.value.len() as u32,
                token_type: token_type_index as u32,
                token_modifiers_bitset: 0,
            };

            semantic_tokens.push(semantic_token);

            last_position = position;
        }
        self.semantic_tokens_map.insert(params.uri.to_string(), semantic_tokens);

        // Lets update the ast.
        let parser = crate::parser::Parser::new(tokens);
        let result = parser.ast();
        let ast = match result {
            Ok(ast) => ast,
            Err(e) => {
                let diagnostic = e.to_lsp_diagnostic(&params.text);
                // We got errors, update the diagnostics.
                self.diagnostics_map.insert(
                    params.uri.to_string(),
                    DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
                        related_documents: None,
                        full_document_diagnostic_report: FullDocumentDiagnosticReport {
                            result_id: None,
                            items: vec![diagnostic.clone()],
                        },
                    }),
                );

                // Publish the diagnostic.
                // If the client supports it.
                self.client
                    .publish_diagnostics(params.uri, vec![diagnostic], None)
                    .await;

                return;
            }
        };

        // Update the symbols map.
        self.symbols_map
            .insert(params.uri.to_string(), ast.get_lsp_symbols(&params.text));

        self.ast_map.insert(params.uri.to_string(), ast);
        // Lets update the diagnostics, since we got no errors.
        self.diagnostics_map.insert(
            params.uri.to_string(),
            DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
                related_documents: None,
                full_document_diagnostic_report: FullDocumentDiagnosticReport {
                    result_id: None,
                    items: vec![],
                },
            }),
        );

        // Publish the diagnostic, we reset it here so the client knows the code compiles now.
        // If the client supports it.
        self.client.publish_diagnostics(params.uri.clone(), vec![], None).await;
    }
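For readers unfamiliar with the LSP wire format used in `on_change` above: semantic tokens are encoded as deltas from the previous token, not as absolute positions. A small invented illustration (values are made up for the example):

use tower_lsp::lsp_types::SemanticToken;

// `const` at line 0, col 0 (length 5) followed by `x` at line 0, col 6 (length 1):
// the second token keeps delta_line = 0 (same line) and delta_start = 6 - 0 = 6,
// exactly the subtraction performed against `last_position` above.
fn example_semantic_tokens() -> Vec<SemanticToken> {
    vec![
        SemanticToken { delta_line: 0, delta_start: 0, length: 5, token_type: 0, token_modifiers_bitset: 0 },
        SemanticToken { delta_line: 0, delta_start: 6, length: 1, token_type: 1, token_modifiers_bitset: 0 },
    ]
}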
    async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
        let mut completions = vec![];

        let ast = match self.ast_map.get(file_name) {
            Some(ast) => ast,
            None => return completions,
        };

        for item in &ast.body {
            match item {
                crate::abstract_syntax_tree_types::BodyItem::ExpressionStatement(_) => continue,
                crate::abstract_syntax_tree_types::BodyItem::ReturnStatement(_) => continue,
                crate::abstract_syntax_tree_types::BodyItem::VariableDeclaration(variable) => {
                    // We only want to complete variables.
                    for declaration in &variable.declarations {
                        completions.push(CompletionItem {
                            label: declaration.id.name.to_string(),
                            label_details: None,
                            kind: Some(match variable.kind {
                                crate::abstract_syntax_tree_types::VariableKind::Let => CompletionItemKind::VARIABLE,
                                crate::abstract_syntax_tree_types::VariableKind::Const => CompletionItemKind::CONSTANT,
                                crate::abstract_syntax_tree_types::VariableKind::Var => CompletionItemKind::VARIABLE,
                                crate::abstract_syntax_tree_types::VariableKind::Fn => CompletionItemKind::FUNCTION,
                            }),
                            detail: Some(variable.kind.to_string()),
                            documentation: None,
                            deprecated: None,
                            preselect: None,
                            sort_text: None,
                            filter_text: None,
                            insert_text: None,
                            insert_text_format: None,
                            insert_text_mode: None,
                            text_edit: None,
                            additional_text_edits: None,
                            command: None,
                            commit_characters: None,
                            data: None,
                            tags: None,
                        });
                    }
                }
            }
        }

        completions
    }
}

#[tower_lsp::async_trait]
impl LanguageServer for Backend {
    async fn initialize(&self, params: InitializeParams) -> RpcResult<InitializeResult> {
        self.client
            .log_message(MessageType::INFO, format!("initialize: {:?}", params))
            .await;

        Ok(InitializeResult {
            capabilities: ServerCapabilities {
                completion_provider: Some(CompletionOptions {
                    resolve_provider: Some(false),
                    trigger_characters: Some(vec![".".to_string()]),
                    work_done_progress_options: Default::default(),
                    all_commit_characters: None,
                    ..Default::default()
                }),
                diagnostic_provider: Some(DiagnosticServerCapabilities::Options(DiagnosticOptions {
                    ..Default::default()
                })),
                document_formatting_provider: Some(OneOf::Left(true)),
                document_symbol_provider: Some(OneOf::Left(true)),
                hover_provider: Some(HoverProviderCapability::Simple(true)),
                inlay_hint_provider: Some(OneOf::Left(true)),
                rename_provider: Some(OneOf::Left(true)),
                semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
                    SemanticTokensRegistrationOptions {
                        text_document_registration_options: {
                            TextDocumentRegistrationOptions {
                                document_selector: Some(vec![DocumentFilter {
                                    language: Some("kcl".to_string()),
                                    scheme: Some("file".to_string()),
                                    pattern: None,
                                }]),
                            }
                        },
                        semantic_tokens_options: SemanticTokensOptions {
                            work_done_progress_options: WorkDoneProgressOptions::default(),
                            legend: SemanticTokensLegend {
                                token_types: self.token_types.clone(),
                                token_modifiers: vec![],
                            },
                            range: Some(false),
                            full: Some(SemanticTokensFullOptions::Bool(true)),
                        },
                        static_registration_options: StaticRegistrationOptions::default(),
                    },
                )),
                signature_help_provider: Some(SignatureHelpOptions {
                    trigger_characters: None,
                    retrigger_characters: None,
                    ..Default::default()
                }),
                text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
                    open_close: Some(true),
                    change: Some(TextDocumentSyncKind::FULL),
                    ..Default::default()
                })),
                workspace: Some(WorkspaceServerCapabilities {
                    workspace_folders: Some(WorkspaceFoldersServerCapabilities {
                        supported: Some(true),
                        change_notifications: Some(OneOf::Left(true)),
                    }),
                    file_operations: None,
                }),
                ..Default::default()
            },
            ..Default::default()
        })
    }

    async fn initialized(&self, params: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, format!("initialized: {:?}", params))
            .await;
    }

    async fn shutdown(&self) -> RpcResult<()> {
        self.client.log_message(MessageType::INFO, "shutdown".to_string()).await;
        Ok(())
    }

    async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
        self.client
            .log_message(MessageType::INFO, "workspace folders changed!")
            .await;
    }

    async fn did_change_configuration(&self, _: DidChangeConfigurationParams) {
        self.client
            .log_message(MessageType::INFO, "configuration changed!")
            .await;
    }

    async fn did_change_watched_files(&self, _: DidChangeWatchedFilesParams) {
        self.client
            .log_message(MessageType::INFO, "watched files have changed!")
            .await;
    }

    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        self.on_change(TextDocumentItem {
            uri: params.text_document.uri,
            text: params.text_document.text,
            version: params.text_document.version,
            language_id: params.text_document.language_id,
        })
        .await
    }

    async fn did_change(&self, mut params: DidChangeTextDocumentParams) {
        self.on_change(TextDocumentItem {
            uri: params.text_document.uri,
            text: std::mem::take(&mut params.content_changes[0].text),
            version: params.text_document.version,
            language_id: Default::default(),
        })
        .await
    }

    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        if let Some(text) = params.text {
            self.on_change(TextDocumentItem {
                uri: params.text_document.uri,
                text,
                version: Default::default(),
                language_id: Default::default(),
            })
            .await
        }
    }

    async fn did_close(&self, _: DidCloseTextDocumentParams) {
        self.client.log_message(MessageType::INFO, "file closed!").await;
    }

    async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
        let filename = params.text_document_position_params.text_document.uri.to_string();

        let Some(current_code) = self.current_code_map.get(&filename) else {
            return Ok(None);
        };

        let pos = position_to_char_index(params.text_document_position_params.position, &current_code);

        // Let's iterate over the AST and find the node that contains the cursor.
        let Some(ast) = self.ast_map.get(&filename) else {
            return Ok(None);
        };

        let Some(value) = ast.get_value_for_position(pos) else {
            return Ok(None);
        };

        let Some(hover) = value.get_hover_value_for_position(pos, &current_code) else {
            return Ok(None);
        };

        match hover {
            crate::abstract_syntax_tree_types::Hover::Function { name, range } => {
                // Get the docs for this function.
                let Some(completion) = self.stdlib_completions.get(&name) else {
                    return Ok(None);
|
||||||
|
};
|
||||||
|
let Some(docs) = &completion.documentation else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let docs = match docs {
|
||||||
|
Documentation::String(docs) => docs,
|
||||||
|
Documentation::MarkupContent(MarkupContent { value, .. }) => value,
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(label_details) = &completion.label_details else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(Hover {
|
||||||
|
contents: HoverContents::Markup(MarkupContent {
|
||||||
|
kind: MarkupKind::Markdown,
|
||||||
|
value: format!(
|
||||||
|
"```{}{}```\n{}",
|
||||||
|
name,
|
||||||
|
label_details.detail.clone().unwrap_or_default(),
|
||||||
|
docs
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
range: Some(range),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
crate::abstract_syntax_tree_types::Hover::Signature { .. } => Ok(None),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn completion(&self, params: CompletionParams) -> RpcResult<Option<CompletionResponse>> {
|
||||||
|
let mut completions = vec![CompletionItem {
|
||||||
|
label: PIPE_OPERATOR.to_string(),
|
||||||
|
label_details: None,
|
||||||
|
kind: Some(CompletionItemKind::OPERATOR),
|
||||||
|
detail: Some("A pipe operator.".to_string()),
|
||||||
|
documentation: Some(Documentation::MarkupContent(MarkupContent {
|
||||||
|
kind: MarkupKind::Markdown,
|
||||||
|
value: "A pipe operator.".to_string(),
|
||||||
|
})),
|
||||||
|
deprecated: Some(false),
|
||||||
|
preselect: None,
|
||||||
|
sort_text: None,
|
||||||
|
filter_text: None,
|
||||||
|
insert_text: Some("|> ".to_string()),
|
||||||
|
insert_text_format: Some(InsertTextFormat::PLAIN_TEXT),
|
||||||
|
insert_text_mode: None,
|
||||||
|
text_edit: None,
|
||||||
|
additional_text_edits: None,
|
||||||
|
command: None,
|
||||||
|
commit_characters: None,
|
||||||
|
data: None,
|
||||||
|
tags: None,
|
||||||
|
}];
|
||||||
|
|
||||||
|
completions.extend(self.stdlib_completions.values().cloned());
|
||||||
|
|
||||||
|
// Get our variables from our AST to include in our completions.
|
||||||
|
completions.extend(
|
||||||
|
self.completions_get_variables_from_ast(params.text_document_position.text_document.uri.as_ref())
|
||||||
|
.await,
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(Some(CompletionResponse::Array(completions)))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn diagnostic(&self, params: DocumentDiagnosticParams) -> RpcResult<DocumentDiagnosticReportResult> {
|
||||||
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
|
// Get the current diagnostics for this file.
|
||||||
|
let Some(diagnostic) = self.diagnostics_map.get(&filename) else {
|
||||||
|
// Send an empty report.
|
||||||
|
return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
|
||||||
|
RelatedFullDocumentDiagnosticReport {
|
||||||
|
related_documents: None,
|
||||||
|
full_document_diagnostic_report: FullDocumentDiagnosticReport {
|
||||||
|
result_id: None,
|
||||||
|
items: vec![],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)));
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(DocumentDiagnosticReportResult::Report(diagnostic.clone()))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
|
||||||
|
let filename = params.text_document_position_params.text_document.uri.to_string();
|
||||||
|
|
||||||
|
let Some(current_code) = self.current_code_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let pos = position_to_char_index(params.text_document_position_params.position, ¤t_code);
|
||||||
|
|
||||||
|
// Let's iterate over the AST and find the node that contains the cursor.
|
||||||
|
let Some(ast) = self.ast_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(value) = ast.get_value_for_position(pos) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(hover) = value.get_hover_value_for_position(pos, ¤t_code) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
match hover {
|
||||||
|
crate::abstract_syntax_tree_types::Hover::Function { name, range: _ } => {
|
||||||
|
// Get the docs for this function.
|
||||||
|
let Some(signature) = self.stdlib_signatures.get(&name) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(signature.clone()))
|
||||||
|
}
|
||||||
|
crate::abstract_syntax_tree_types::Hover::Signature {
|
||||||
|
name,
|
||||||
|
parameter_index,
|
||||||
|
range: _,
|
||||||
|
} => {
|
||||||
|
let Some(signature) = self.stdlib_signatures.get(&name) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut signature = signature.clone();
|
||||||
|
|
||||||
|
signature.active_parameter = Some(parameter_index);
|
||||||
|
|
||||||
|
Ok(Some(signature.clone()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn inlay_hint(&self, _params: InlayHintParams) -> RpcResult<Option<Vec<InlayHint>>> {
|
||||||
|
// TODO: do this
|
||||||
|
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
|
||||||
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
|
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
|
||||||
|
result_id: None,
|
||||||
|
data: semantic_tokens.clone(),
|
||||||
|
})))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
|
||||||
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
|
let Some(symbols) = self.symbols_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Some(DocumentSymbolResponse::Nested(symbols.clone())))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
|
||||||
|
let filename = params.text_document.uri.to_string();
|
||||||
|
|
||||||
|
let Some(current_code) = self.current_code_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parse the ast.
|
||||||
|
// I don't know if we need to do this again since it should be updated in the context.
|
||||||
|
// But I figure better safe than sorry since this will write back out to the file.
|
||||||
|
let tokens = crate::tokeniser::lexer(¤t_code);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let Ok(ast) = parser.ast() else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
// Now recast it.
|
||||||
|
let recast = ast.recast(
|
||||||
|
&crate::abstract_syntax_tree_types::FormatOptions {
|
||||||
|
tab_size: params.options.tab_size as usize,
|
||||||
|
insert_final_newline: params.options.insert_final_newline.unwrap_or(false),
|
||||||
|
use_tabs: !params.options.insert_spaces,
|
||||||
|
},
|
||||||
|
0,
|
||||||
|
);
|
||||||
|
let source_range = SourceRange([0, current_code.len() - 1]);
|
||||||
|
let range = source_range.to_lsp_range(¤t_code);
|
||||||
|
Ok(Some(vec![TextEdit {
|
||||||
|
new_text: recast,
|
||||||
|
range,
|
||||||
|
}]))
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
|
||||||
|
let filename = params.text_document_position.text_document.uri.to_string();
|
||||||
|
|
||||||
|
let Some(current_code) = self.current_code_map.get(&filename) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parse the ast.
|
||||||
|
// I don't know if we need to do this again since it should be updated in the context.
|
||||||
|
// But I figure better safe than sorry since this will write back out to the file.
|
||||||
|
let tokens = crate::tokeniser::lexer(¤t_code);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let Ok(mut ast) = parser.ast() else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Let's convert the position to a character index.
|
||||||
|
let pos = position_to_char_index(params.text_document_position.position, ¤t_code);
|
||||||
|
// Now let's perform the rename on the ast.
|
||||||
|
ast.rename_symbol(¶ms.new_name, pos);
|
||||||
|
// Now recast it.
|
||||||
|
let recast = ast.recast(&Default::default(), 0);
|
||||||
|
let source_range = SourceRange([0, current_code.len() - 1]);
|
||||||
|
let range = source_range.to_lsp_range(¤t_code);
|
||||||
|
Ok(Some(WorkspaceEdit {
|
||||||
|
changes: Some(HashMap::from([(
|
||||||
|
params.text_document_position.text_document.uri,
|
||||||
|
vec![TextEdit {
|
||||||
|
new_text: recast,
|
||||||
|
range,
|
||||||
|
}],
|
||||||
|
)])),
|
||||||
|
document_changes: None,
|
||||||
|
change_annotations: None,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get completions from our stdlib.
|
||||||
|
pub fn get_completions_from_stdlib(stdlib: &crate::std::StdLib) -> Result<HashMap<String, CompletionItem>> {
|
||||||
|
let mut completions = HashMap::new();
|
||||||
|
|
||||||
|
for internal_fn in stdlib.fns.values() {
|
||||||
|
completions.insert(internal_fn.name(), internal_fn.to_completion_item());
|
||||||
|
}
|
||||||
|
|
||||||
|
let variable_kinds = VariableKind::to_completion_items()?;
|
||||||
|
for variable_kind in variable_kinds {
|
||||||
|
completions.insert(variable_kind.label.clone(), variable_kind);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(completions)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get signatures from our stdlib.
|
||||||
|
pub fn get_signatures_from_stdlib(stdlib: &crate::std::StdLib) -> Result<HashMap<String, SignatureHelp>> {
|
||||||
|
let mut signatures = HashMap::new();
|
||||||
|
|
||||||
|
for internal_fn in stdlib.fns.values() {
|
||||||
|
signatures.insert(internal_fn.name(), internal_fn.to_signature_help());
|
||||||
|
}
|
||||||
|
|
||||||
|
let show = SignatureHelp {
|
||||||
|
signatures: vec![SignatureInformation {
|
||||||
|
label: "show".to_string(),
|
||||||
|
documentation: Some(Documentation::MarkupContent(MarkupContent {
|
||||||
|
kind: MarkupKind::PlainText,
|
||||||
|
value: "Show a model.".to_string(),
|
||||||
|
})),
|
||||||
|
parameters: Some(vec![ParameterInformation {
|
||||||
|
label: ParameterLabel::Simple("sg: SketchGroup".to_string()),
|
||||||
|
documentation: Some(Documentation::MarkupContent(MarkupContent {
|
||||||
|
kind: MarkupKind::PlainText,
|
||||||
|
value: "A sketch group.".to_string(),
|
||||||
|
})),
|
||||||
|
}]),
|
||||||
|
active_parameter: None,
|
||||||
|
}],
|
||||||
|
active_signature: Some(0),
|
||||||
|
active_parameter: None,
|
||||||
|
};
|
||||||
|
signatures.insert("show".to_string(), show);
|
||||||
|
|
||||||
|
Ok(signatures)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Convert a position to a character index from the start of the file.
|
||||||
|
fn position_to_char_index(position: Position, code: &str) -> usize {
|
||||||
|
// Get the character position from the start of the file.
|
||||||
|
let mut char_position = 0;
|
||||||
|
for (index, line) in code.lines().enumerate() {
|
||||||
|
if index == position.line as usize {
|
||||||
|
char_position += position.character as usize;
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
char_position += line.len() + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
char_position
|
||||||
|
}
|
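// Illustrative sketch, not from the changeset: it only assumes the private
// `position_to_char_index` helper shown above and `tower_lsp::lsp_types::Position`.
// Each preceding line contributes its length plus one for the newline, then the
// column on the target line is added.
#[cfg(test)]
mod position_to_char_index_example {
    use tower_lsp::lsp_types::Position;

    #[test]
    fn maps_an_lsp_position_to_a_char_offset() {
        let code = "let a = 1\nshow(a)";
        // Line 0 is 9 characters plus its newline, so column 2 of line 1 lands on the 'o' in "show".
        assert_eq!(super::position_to_char_index(Position { line: 1, character: 2 }, code), 12);
    }
}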
@ -5,9 +5,6 @@ pub mod segment;
pub mod sketch;
pub mod utils;

// TODO: Something that would be nice is if we could generate docs for Kcl based on the
// actual stdlib functions below.

use std::collections::HashMap;

use anyhow::Result;
@ -23,18 +20,17 @@ use crate::{
    executor::{ExtrudeGroup, MemoryItem, Metadata, SketchGroup, SourceRange},
};

pub type FnMap = HashMap<String, StdFn>;
pub type StdFn = fn(&mut Args) -> Result<MemoryItem, KclError>;
pub type FnMap = HashMap<String, StdFn>;

pub struct StdLib {
    pub internal_fn_names: Vec<Box<(dyn crate::docs::StdLibFn)>>,
    pub fns: HashMap<String, Box<(dyn crate::docs::StdLibFn)>>,

    pub fns: FnMap,
}

impl StdLib {
    pub fn new() -> Self {
        let internal_fn_names: Vec<Box<(dyn crate::docs::StdLibFn)>> = vec![
        let internal_fns: Vec<Box<(dyn crate::docs::StdLibFn)>> = vec![
            Box::new(Show),
            Box::new(Min),
            Box::new(LegLen),
            Box::new(LegAngX),
@ -68,11 +64,15 @@ impl StdLib {
        ];

        let mut fns = HashMap::new();
        for internal_fn_name in &internal_fn_names {
        for internal_fn in &internal_fns {
            fns.insert(internal_fn_name.name().to_string(), internal_fn_name.std_lib_fn());
            fns.insert(internal_fn.name().to_string(), internal_fn.clone());
        }

        Self { internal_fn_names, fns }
        Self { fns }
    }

    pub fn get(&self, name: &str) -> Option<Box<dyn crate::docs::StdLibFn>> {
        self.fns.get(name).cloned()
    }
}

@ -407,7 +407,6 @@ impl<'a> Args<'a> {
}

/// Returns the minimum of the given arguments.
/// TODO fix min
pub fn min(args: &mut Args) -> Result<MemoryItem, KclError> {
    let nums = args.get_number_array()?;
    let result = inner_min(nums);
@ -430,6 +429,21 @@ fn inner_min(args: Vec<f64>) -> f64 {
    min
}

/// Render a model.
// This never actually gets called so this is fine.
pub fn show(args: &mut Args) -> Result<MemoryItem, KclError> {
    let sketch_group = args.get_sketch_group()?;
    inner_show(sketch_group);

    args.make_user_val_from_f64(0.0)
}

/// Render a model.
#[stdlib {
    name = "show",
}]
fn inner_show(_sketch: SketchGroup) {}

/// Returns the length of the given leg.
pub fn leg_length(args: &mut Args) -> Result<MemoryItem, KclError> {
    let (hypotenuse, leg) = args.get_hypotenuse_leg()?;
@ -493,6 +507,7 @@ pub enum Primitive {
#[cfg(test)]
mod tests {
    use crate::std::StdLib;
    use itertools::Itertools;

    #[test]
    fn test_generate_stdlib_markdown_docs() {
@ -508,7 +523,8 @@ mod tests {

        buf.push_str("* [Functions](#functions)\n");

        for internal_fn in &stdlib.internal_fn_names {
        for key in stdlib.fns.keys().sorted() {
            let internal_fn = stdlib.fns.get(key).unwrap();
            if internal_fn.unpublished() || internal_fn.deprecated() {
                continue;
            }
@ -520,7 +536,8 @@ mod tests {

        buf.push_str("## Functions\n\n");

        for internal_fn in &stdlib.internal_fn_names {
        for key in stdlib.fns.keys().sorted() {
            let internal_fn = stdlib.fns.get(key).unwrap();
            if internal_fn.unpublished() {
                continue;
            }
@ -555,17 +572,18 @@ mod tests {
            }
        }

        fn_docs.push_str("\n#### Returns\n\n");
        if let Some(return_type) = internal_fn.return_value() {
        let return_type = internal_fn.return_value();
            fn_docs.push_str("\n#### Returns\n\n");
            if let Some(description) = return_type.description() {
                fn_docs.push_str(&format!("* `{}` - {}\n", return_type.type_, description));
            } else {
                fn_docs.push_str(&format!("* `{}`\n", return_type.type_));
            }

            let (format, should_be_indented) = return_type.get_type_string().unwrap();
            if should_be_indented {
                fn_docs.push_str(&format!("```\n{}\n```\n", format));
            }
        }

        fn_docs.push_str("\n\n\n");
@ -582,7 +600,8 @@ mod tests {

        let mut json_data = vec![];

        for internal_fn in &stdlib.internal_fn_names {
        for key in stdlib.fns.keys().sorted() {
            let internal_fn = stdlib.fns.get(key).unwrap();
            json_data.push(internal_fn.to_json().unwrap());
        }

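// Illustrative sketch, not from the changeset: it assumes the reworked `StdLib` shown
// above, where lookups go through the single `fns` map keyed by function name and
// `internal_fn_names` is gone.
fn stdlib_lookup_example() {
    let stdlib = crate::std::StdLib::new();
    // `get` clones the boxed `StdLibFn` out of the `fns` map.
    if let Some(min_fn) = stdlib.get("min") {
        assert_eq!(min_fn.name(), "min");
    }
}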
@ -1,22 +1,110 @@
use lazy_static::lazy_static;
use std::str::FromStr;

use regex::Regex;
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS)]
use anyhow::Result;
use lazy_static::lazy_static;
use parse_display::{Display, FromStr};
use regex::Regex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;

/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[ts(export)]
#[serde(rename_all = "lowercase")]
#[serde(rename_all = "camelCase")]
#[display(style = "camelCase")]
pub enum TokenType {
    /// A number.
    Number,
    /// A word.
    Word,
    /// An operator.
    Operator,
    /// A string.
    String,
    /// A keyword.
    Keyword,
    /// A brace.
    Brace,
    /// Whitespace.
    Whitespace,
    /// A comma.
    Comma,
    /// A colon.
    Colon,
    /// A period.
    Period,
    /// A line comment.
    LineComment,
    /// A block comment.
    BlockComment,
    /// A function name.
    Function,
}

impl TryFrom<TokenType> for SemanticTokenType {
    type Error = anyhow::Error;
    fn try_from(token_type: TokenType) -> Result<Self> {
        Ok(match token_type {
            TokenType::Number => Self::NUMBER,
            TokenType::Word => Self::VARIABLE,
            TokenType::Keyword => Self::KEYWORD,
            TokenType::Operator => Self::OPERATOR,
            TokenType::String => Self::STRING,
            TokenType::LineComment => Self::COMMENT,
            TokenType::BlockComment => Self::COMMENT,
            TokenType::Function => Self::FUNCTION,
            TokenType::Whitespace | TokenType::Brace | TokenType::Comma | TokenType::Colon | TokenType::Period => {
                anyhow::bail!("unsupported token type: {:?}", token_type)
            }
        })
    }
}

impl TokenType {
    // This is for the lsp server.
    pub fn to_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
        let mut settings = schemars::gen::SchemaSettings::openapi3();
        settings.inline_subschemas = true;
        let mut generator = schemars::gen::SchemaGenerator::new(settings);

        let schema = TokenType::json_schema(&mut generator);
        let schemars::schema::Schema::Object(o) = &schema else {
            anyhow::bail!("expected object schema: {:#?}", schema);
        };
        let Some(subschemas) = &o.subschemas else {
            anyhow::bail!("expected subschemas: {:#?}", schema);
        };
        let Some(one_ofs) = &subschemas.one_of else {
            anyhow::bail!("expected one_of: {:#?}", schema);
        };

        let mut semantic_tokens = vec![];
        for one_of in one_ofs {
            let schemars::schema::Schema::Object(o) = one_of else {
                anyhow::bail!("expected object one_of: {:#?}", one_of);
            };

            let Some(enum_values) = o.enum_values.as_ref() else {
                anyhow::bail!("expected enum values: {:#?}", o);
            };

            if enum_values.len() > 1 {
                anyhow::bail!("expected only one enum value: {:#?}", o);
            }

            if enum_values.is_empty() {
                anyhow::bail!("expected at least one enum value: {:#?}", o);
            }

            let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
            if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
                semantic_tokens.push(semantic_token_type);
            }
        }

        Ok(semantic_tokens)
    }
}

#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
@ -45,8 +133,11 @@ lazy_static! {
    static ref NUMBER: Regex = Regex::new(r"^-?\d+(\.\d+)?").unwrap();
    static ref WHITESPACE: Regex = Regex::new(r"\s+").unwrap();
    static ref WORD: Regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*").unwrap();
    static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
    // TODO: these should be generated using our struct types for these.
    static ref KEYWORD: Regex =
        Regex::new(r"^(if|else|for|while|return|break|continue|fn|let|true|false|nil|and|or|not|var|const)\b").unwrap();
    static ref OPERATOR: Regex = Regex::new(r"^(>=|<=|==|=>|!= |\|>|\*|\+|-|/|%|=|<|>|\||\^)").unwrap();
    static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
    static ref BLOCK_START: Regex = Regex::new(r"^\{").unwrap();
    static ref BLOCK_END: Regex = Regex::new(r"^\}").unwrap();
    static ref PARAN_START: Regex = Regex::new(r"^\(").unwrap();
@ -69,6 +160,9 @@ fn is_whitespace(character: &str) -> bool {
fn is_word(character: &str) -> bool {
    WORD.is_match(character)
}
fn is_keyword(character: &str) -> bool {
    KEYWORD.is_match(character)
}
fn is_string(character: &str) -> bool {
    match STRING.find(character) {
        Some(m) => m.start() == 0,
@ -112,8 +206,8 @@ fn is_block_comment(character: &str) -> bool {
    BLOCKCOMMENT.is_match(character)
}

fn match_first(str: &str, regex: &Regex) -> Option<String> {
fn match_first(s: &str, regex: &Regex) -> Option<String> {
    regex.find(str).map(|the_match| the_match.as_str().to_string())
    regex.find(s).map(|the_match| the_match.as_str().to_string())
}

fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
@ -125,8 +219,8 @@ fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
    }
}

fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
fn return_token_at_index(s: &str, start_index: usize) -> Option<Token> {
    let str_from_index = &str[start_index..];
    let str_from_index = &s.chars().skip(start_index).collect::<String>();
    if is_string(str_from_index) {
        return Some(make_token(
            TokenType::String,
@ -216,6 +310,13 @@ fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
            start_index,
        ));
    }
    if is_keyword(str_from_index) {
        return Some(make_token(
            TokenType::Keyword,
            &match_first(str_from_index, &KEYWORD)?,
            start_index,
        ));
    }
    if is_word(str_from_index) {
        return Some(make_token(
            TokenType::Word,
@ -247,21 +348,22 @@ fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
    None
}

pub fn lexer(str: &str) -> Vec<Token> {
    fn recursively_tokenise(str: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
        if current_index >= str.len() {
            return previous_tokens;
        }
        let token = return_token_at_index(str, current_index);
        let Some(token) = token else {
            return recursively_tokenise(str, current_index + 1, previous_tokens);
        };
        let mut new_tokens = previous_tokens;
        let token_length = token.value.len();
        new_tokens.push(token);
        recursively_tokenise(str, current_index + token_length, new_tokens)
    }
    recursively_tokenise(str, 0, Vec::new())
}

fn recursively_tokenise(s: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
    if current_index >= s.len() {
        return previous_tokens;
    }
    let token = return_token_at_index(s, current_index);
    let Some(token) = token else {
        return recursively_tokenise(s, current_index + 1, previous_tokens);
    };
    let mut new_tokens = previous_tokens;
    let token_length = token.value.len();
    new_tokens.push(token);
    recursively_tokenise(s, current_index + token_length, new_tokens)
}

pub fn lexer(s: &str) -> Vec<Token> {
    recursively_tokenise(s, 0, Vec::new())
}

#[cfg(test)]
@ -330,6 +432,7 @@ mod tests {
        assert!(!is_string(" \"a\""));
        assert!(!is_string("5\"a\""));
        assert!(!is_string("a + 'str'"));
        assert!(is_string("'c'"));
    }

    #[test]
@ -453,14 +556,20 @@ mod tests {
        assert!(!is_block_comment("5 + 5"));
        assert!(!is_block_comment("5/* + 5"));
        assert!(!is_block_comment(" /* + 5"));
        assert!(!is_block_comment(
            r#" /* and
here
*/
"#
        ));
    }

    #[test]
    fn make_token_test() {
        assert_eq!(
            make_token(TokenType::Word, "const", 56),
            make_token(TokenType::Keyword, "const", 56),
            Token {
                token_type: TokenType::Word,
                token_type: TokenType::Keyword,
                value: "const".to_string(),
                start: 56,
                end: 61,
@ -473,7 +582,7 @@ mod tests {
        assert_eq!(
            return_token_at_index("const", 0),
            Some(Token {
                token_type: TokenType::Word,
                token_type: TokenType::Keyword,
                value: "const".to_string(),
                start: 0,
                end: 5,
@ -496,7 +605,7 @@ mod tests {
            lexer("const a=5"),
            vec![
                Token {
                    token_type: TokenType::Word,
                    token_type: TokenType::Keyword,
                    value: "const".to_string(),
                    start: 0,
                    end: 5,
@ -587,4 +696,11 @@ mod tests {
            ]
        );
    }

    // We have this as a test so we can ensure it never panics with an unwrap in the server.
    #[test]
    fn test_token_type_to_semantic_token_type() {
        let semantic_types = TokenType::to_semantic_token_types().unwrap();
        assert!(!semantic_types.is_empty());
    }
}
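// Illustrative sketch, not from the changeset: it assumes the tokeniser API shown above,
// where reserved words such as `const` now lex as `TokenType::Keyword`, which is what the
// LSP semantic-token mapping relies on.
fn lexer_keyword_example() {
    let tokens = crate::tokeniser::lexer("const a = 1");
    // The first token is the `const` keyword rather than a generic `Word`.
    assert_eq!(tokens[0].token_type, crate::tokeniser::TokenType::Keyword);
    assert_eq!(tokens[0].value, "const");
}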
@ -1,6 +1,12 @@
//! Wasm bindings for `kcl`.

#[cfg(target_arch = "wasm32")]
use futures::stream::TryStreamExt;
use gloo_utils::format::JsValueSerdeExt;
#[cfg(target_arch = "wasm32")]
use kcl_lib::server::{get_completions_from_stdlib, get_signatures_from_stdlib, Backend};
#[cfg(target_arch = "wasm32")]
use tower_lsp::{LspService, Server};
use wasm_bindgen::prelude::*;

// wasm_bindgen wrapper for execute
@ -55,7 +61,8 @@ pub fn lexer_js(js: &str) -> Result<JsValue, JsError> {
#[wasm_bindgen]
pub fn parse_js(js: &str) -> Result<JsValue, String> {
    let tokens = kcl_lib::tokeniser::lexer(js);
    let program = kcl_lib::parser::abstract_syntax_tree(&tokens).map_err(String::from)?;
    let parser = kcl_lib::parser::Parser::new(tokens);
    let program = parser.ast().map_err(String::from)?;
    // The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
    // gloo-serialize crate instead.
    JsValue::from_serde(&program).map_err(|e| e.to_string())
@ -69,6 +76,81 @@ pub fn recast_wasm(json_str: &str) -> Result<JsValue, JsError> {
    let program: kcl_lib::abstract_syntax_tree_types::Program =
        serde_json::from_str(json_str).map_err(JsError::from)?;

    let result = kcl_lib::recast::recast(&program, "", false);
    // Use the default options until we integrate into the UI the ability to change them.
    let result = program.recast(&Default::default(), 0);
    Ok(JsValue::from_serde(&result)?)
}

#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub struct ServerConfig {
    into_server: js_sys::AsyncIterator,
    from_server: web_sys::WritableStream,
}

#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
impl ServerConfig {
    #[wasm_bindgen(constructor)]
    pub fn new(into_server: js_sys::AsyncIterator, from_server: web_sys::WritableStream) -> Self {
        Self {
            into_server,
            from_server,
        }
    }
}

/// Run the `kcl` lsp server.
//
// NOTE: we don't use web_sys::ReadableStream for input here because on the
// browser side we need to use a ReadableByteStreamController to construct it
// and so far only Chromium-based browsers support that functionality.

// NOTE: input needs to be an AsyncIterator<Uint8Array, never, void> specifically
#[cfg(target_arch = "wasm32")]
#[wasm_bindgen]
pub async fn lsp_run(config: ServerConfig) -> Result<(), JsValue> {
    let ServerConfig {
        into_server,
        from_server,
    } = config;

    let stdlib = kcl_lib::std::StdLib::new();
    let stdlib_completions = get_completions_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
    let stdlib_signatures = get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
    // We can unwrap here because we know the tokeniser is valid, since
    // we have a test for it.
    let token_types = kcl_lib::tokeniser::TokenType::to_semantic_token_types().unwrap();

    let (service, socket) = LspService::new(|client| Backend {
        client,
        stdlib_completions,
        stdlib_signatures,
        token_types,
        token_map: Default::default(),
        ast_map: Default::default(),
        current_code_map: Default::default(),
        diagnostics_map: Default::default(),
        symbols_map: Default::default(),
        semantic_tokens_map: Default::default(),
    });

    let input = wasm_bindgen_futures::stream::JsStream::from(into_server);
    let input = input
        .map_ok(|value| {
            value
                .dyn_into::<js_sys::Uint8Array>()
                .expect("could not cast stream item to Uint8Array")
                .to_vec()
        })
        .map_err(|_err| std::io::Error::from(std::io::ErrorKind::Other))
        .into_async_read();

    let output = wasm_bindgen::JsCast::unchecked_into::<wasm_streams::writable::sys::WritableStream>(from_server);
    let output = wasm_streams::WritableStream::from_raw(output);
    let output = output.try_into_async_write().map_err(|err| err.0)?;

    Server::new(input, output, socket).serve(service).await;

    Ok(())
}
521 yarn.lock
@ -1157,7 +1157,7 @@
    "@babel/helper-validator-identifier" "^7.22.5"
    to-fast-properties "^2.0.0"

"@codemirror/autocomplete@^6.0.0", "@codemirror/autocomplete@^6.3.2", "@codemirror/autocomplete@^6.7.1":
"@codemirror/autocomplete@^6.0.0", "@codemirror/autocomplete@^6.9.0":
  version "6.9.0"
  resolved "https://registry.yarnpkg.com/@codemirror/autocomplete/-/autocomplete-6.9.0.tgz#1a1e63122288b8f8e1e9d7aff2eb39a83e04d8a9"
  integrity sha512-Fbwm0V/Wn3BkEJZRhr0hi5BhCo5a7eBL6LYaliPjOSwCyfOpnjXY59HruSxOUNV+1OYer0Tgx1zRNQttjXyDog==
@ -1177,222 +1177,7 @@
    "@codemirror/view" "^6.0.0"
    "@lezer/common" "^1.0.0"

"@codemirror/lang-angular@^0.1.0":
"@codemirror/language@^6.0.0":
  version "0.1.2"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-angular/-/lang-angular-0.1.2.tgz#a3f565297842ad60caf2a0bf6f6137c13d19a666"
  integrity sha512-Nq7lmx9SU+JyoaRcs6SaJs7uAmW2W06HpgJVQYeZptVGNWDzDvzhjwVb/ZuG1rwTlOocY4Y9GwNOBuKCeJbKtw==
  dependencies:
    "@codemirror/lang-html" "^6.0.0"
    "@codemirror/lang-javascript" "^6.1.2"
    "@codemirror/language" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.3.3"

"@codemirror/lang-cpp@^6.0.0":
  version "6.0.2"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-cpp/-/lang-cpp-6.0.2.tgz#076c98340c3beabde016d7d83e08eebe17254ef9"
  integrity sha512-6oYEYUKHvrnacXxWxYa6t4puTlbN3dgV662BDfSH8+MfjQjVmP697/KYTDOqpxgerkvoNm7q5wlFMBeX8ZMocg==
  dependencies:
    "@codemirror/language" "^6.0.0"
    "@lezer/cpp" "^1.0.0"

"@codemirror/lang-css@^6.0.0", "@codemirror/lang-css@^6.2.0":
  version "6.2.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-css/-/lang-css-6.2.1.tgz#5dc0a43b8e3c31f6af7aabd55ff07fe9aef2a227"
  integrity sha512-/UNWDNV5Viwi/1lpr/dIXJNWiwDxpw13I4pTUAsNxZdg6E0mI2kTQb0P2iHczg1Tu+H4EBgJR+hYhKiHKko7qg==
  dependencies:
    "@codemirror/autocomplete" "^6.0.0"
    "@codemirror/language" "^6.0.0"
    "@codemirror/state" "^6.0.0"
    "@lezer/common" "^1.0.2"
    "@lezer/css" "^1.0.0"

"@codemirror/lang-html@^6.0.0", "@codemirror/lang-html@^6.4.0":
  version "6.4.5"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-html/-/lang-html-6.4.5.tgz#4cf014da02624a8a4365ef6c8e343f35afa0c784"
  integrity sha512-dUCSxkIw2G+chaUfw3Gfu5kkN83vJQN8gfQDp9iEHsIZluMJA0YJveT12zg/28BJx+uPsbQ6VimKCgx3oJrZxA==
  dependencies:
    "@codemirror/autocomplete" "^6.0.0"
    "@codemirror/lang-css" "^6.0.0"
    "@codemirror/lang-javascript" "^6.0.0"
    "@codemirror/language" "^6.4.0"
    "@codemirror/state" "^6.0.0"
    "@codemirror/view" "^6.2.2"
    "@lezer/common" "^1.0.0"
    "@lezer/css" "^1.1.0"
    "@lezer/html" "^1.3.0"

"@codemirror/lang-java@^6.0.0":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-java/-/lang-java-6.0.1.tgz#03bd06334da7c8feb9dff6db01ac6d85bd2e48bb"
  integrity sha512-OOnmhH67h97jHzCuFaIEspbmsT98fNdhVhmA3zCxW0cn7l8rChDhZtwiwJ/JOKXgfm4J+ELxQihxaI7bj7mJRg==
  dependencies:
    "@codemirror/language" "^6.0.0"
    "@lezer/java" "^1.0.0"

"@codemirror/lang-javascript@^6.0.0", "@codemirror/lang-javascript@^6.1.0", "@codemirror/lang-javascript@^6.1.2":
  version "6.1.9"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-javascript/-/lang-javascript-6.1.9.tgz#19065ad32db7b3797829eca01b8d9c69da5fd0d6"
  integrity sha512-z3jdkcqOEBT2txn2a87A0jSy6Te3679wg/U8QzMeftFt+4KA6QooMwfdFzJiuC3L6fXKfTXZcDocoaxMYfGz0w==
  dependencies:
    "@codemirror/autocomplete" "^6.0.0"
    "@codemirror/language" "^6.6.0"
    "@codemirror/lint" "^6.0.0"
    "@codemirror/state" "^6.0.0"
    "@codemirror/view" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/javascript" "^1.0.0"

"@codemirror/lang-json@^6.0.0":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-json/-/lang-json-6.0.1.tgz#0a0be701a5619c4b0f8991f9b5e95fe33f462330"
  integrity sha512-+T1flHdgpqDDlJZ2Lkil/rLiRy684WMLc74xUnjJH48GQdfJo/pudlTRreZmKwzP8/tGdKf83wlbAdOCzlJOGQ==
  dependencies:
    "@codemirror/language" "^6.0.0"
    "@lezer/json" "^1.0.0"

"@codemirror/lang-less@^6.0.0", "@codemirror/lang-less@^6.0.1":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-less/-/lang-less-6.0.1.tgz#fef10e8dbcd07055b815c3928233a05a8549181e"
  integrity sha512-ABcsKBjLbyPZwPR5gePpc8jEKCQrFF4pby2WlMVdmJOOr7OWwwyz8DZonPx/cKDE00hfoSLc8F7yAcn/d6+rTQ==
  dependencies:
    "@codemirror/lang-css" "^6.2.0"
    "@codemirror/language" "^6.0.0"
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.0.0"

"@codemirror/lang-lezer@^6.0.0":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-lezer/-/lang-lezer-6.0.1.tgz#16a5909ab8ab4a23e9b214476413dc92a3191780"
  integrity sha512-WHwjI7OqKFBEfkunohweqA5B/jIlxaZso6Nl3weVckz8EafYbPZldQEKSDb4QQ9H9BUkle4PVELP4sftKoA0uQ==
  dependencies:
    "@codemirror/language" "^6.0.0"
    "@codemirror/state" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/lezer" "^1.0.0"

"@codemirror/lang-markdown@^6.0.0", "@codemirror/lang-markdown@^6.1.0":
  version "6.2.0"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-markdown/-/lang-markdown-6.2.0.tgz#d391d1314911da522bf4cc4edb15ff6b3eb66979"
  integrity sha512-deKegEQVzfBAcLPqsJEa+IxotqPVwWZi90UOEvQbfa01NTAw8jNinrykuYPTULGUj+gha0ZG2HBsn4s5d64Qrg==
  dependencies:
    "@codemirror/autocomplete" "^6.7.1"
    "@codemirror/lang-html" "^6.0.0"
    "@codemirror/language" "^6.3.0"
    "@codemirror/state" "^6.0.0"
    "@codemirror/view" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/markdown" "^1.0.0"

"@codemirror/lang-php@^6.0.0":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-php/-/lang-php-6.0.1.tgz#fa34cc75562178325861a5731f79bd621f57ffaa"
  integrity sha512-ublojMdw/PNWa7qdN5TMsjmqkNuTBD3k6ndZ4Z0S25SBAiweFGyY68AS3xNcIOlb6DDFDvKlinLQ40vSLqf8xA==
  dependencies:
    "@codemirror/lang-html" "^6.0.0"
    "@codemirror/language" "^6.0.0"
    "@codemirror/state" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/php" "^1.0.0"

"@codemirror/lang-python@^6.0.0", "@codemirror/lang-python@^6.1.0":
  version "6.1.3"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-python/-/lang-python-6.1.3.tgz#47b8d9fb42eb4482317843e519c6c211accacb62"
  integrity sha512-S9w2Jl74hFlD5nqtUMIaXAq9t5WlM0acCkyuQWUUSvZclk1sV+UfnpFiZzuZSG+hfEaOmxKR5UxY/Uxswn7EhQ==
  dependencies:
    "@codemirror/autocomplete" "^6.3.2"
    "@codemirror/language" "^6.8.0"
    "@lezer/python" "^1.1.4"

"@codemirror/lang-rust@^6.0.0":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-rust/-/lang-rust-6.0.1.tgz#d6829fc7baa39a15bcd174a41a9e0a1bf7cf6ba8"
  integrity sha512-344EMWFBzWArHWdZn/NcgkwMvZIWUR1GEBdwG8FEp++6o6vT6KL9V7vGs2ONsKxxFUPXKI0SPcWhyYyl2zPYxQ==
  dependencies:
    "@codemirror/language" "^6.0.0"
    "@lezer/rust" "^1.0.0"

"@codemirror/lang-sass@^6.0.0", "@codemirror/lang-sass@^6.0.1":
  version "6.0.2"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-sass/-/lang-sass-6.0.2.tgz#38c1b0a1326cc9f5cb2741d2cd51cfbcd7abc0b2"
  integrity sha512-l/bdzIABvnTo1nzdY6U+kPAC51czYQcOErfzQ9zSm9D8GmNPD0WTW8st/CJwBTPLO8jlrbyvlSEcN20dc4iL0Q==
  dependencies:
    "@codemirror/lang-css" "^6.2.0"
    "@codemirror/language" "^6.0.0"
    "@codemirror/state" "^6.0.0"
    "@lezer/common" "^1.0.2"
    "@lezer/sass" "^1.0.0"

"@codemirror/lang-sql@^6.0.0", "@codemirror/lang-sql@^6.4.0":
  version "6.5.3"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-sql/-/lang-sql-6.5.3.tgz#e530f735f432afb7287c0e9bdd00496b8ae654ff"
  integrity sha512-3M+0LgBN/H4ukfdX2E/6LnsCyOyas9jd+39c4DQu92ihlllE76arLM0RRBHR6IV0sVzpJq+wTcDgahwWtbQthg==
  dependencies:
    "@codemirror/autocomplete" "^6.0.0"
    "@codemirror/language" "^6.0.0"
    "@codemirror/state" "^6.0.0"
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.0.0"

"@codemirror/lang-vue@^0.1.1":
  version "0.1.2"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-vue/-/lang-vue-0.1.2.tgz#50aec87b93ba8a6b0742a24cbab566b3989ee6ca"
  integrity sha512-D4YrefiRBAr+CfEIM4S3yvGSbYW+N69mttIfGMEf7diHpRbmygDxS+R/5xSqjgtkY6VO6qmUrre1GkRcWeZa9A==
  dependencies:
    "@codemirror/lang-html" "^6.0.0"
    "@codemirror/lang-javascript" "^6.1.2"
    "@codemirror/language" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.3.1"

"@codemirror/lang-wast@^6.0.0":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-wast/-/lang-wast-6.0.1.tgz#c15bec84548a5e9b0a43fa69fb63631d087d6047"
  integrity sha512-sQLsqhRjl2MWG3rxZysX+2XAyed48KhLBHLgq9xcKxIJu3npH/G+BIXW5NM5mHeDUjG0jcGh9BcjP0NfMStuzA==
  dependencies:
    "@codemirror/language" "^6.0.0"
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.0.0"

"@codemirror/lang-xml@^6.0.0":
  version "6.0.2"
  resolved "https://registry.yarnpkg.com/@codemirror/lang-xml/-/lang-xml-6.0.2.tgz#66f75390bf8013fd8645db9cdd0b1d177e0777a4"
  integrity sha512-JQYZjHL2LAfpiZI2/qZ/qzDuSqmGKMwyApYmEUUCTxLM4MWS7sATUEfIguZQr9Zjx/7gcdnewb039smF6nC2zw==
  dependencies:
    "@codemirror/autocomplete" "^6.0.0"
    "@codemirror/language" "^6.4.0"
    "@codemirror/state" "^6.0.0"
    "@lezer/common" "^1.0.0"
    "@lezer/xml" "^1.0.0"

"@codemirror/language-data@^6.0.0":
  version "6.3.1"
  resolved "https://registry.yarnpkg.com/@codemirror/language-data/-/language-data-6.3.1.tgz#795ec09e04260868070296241363d70f4060bb36"
  integrity sha512-p6jhJmvhGe1TG1EGNhwH7nFWWFSTJ8NDKnB2fVx5g3t+PpO0+63R7GJNxjS0TmmH3cdMxZbzejsik+rlEh1EyQ==
  dependencies:
    "@codemirror/lang-angular" "^0.1.0"
    "@codemirror/lang-cpp" "^6.0.0"
    "@codemirror/lang-css" "^6.0.0"
    "@codemirror/lang-html" "^6.0.0"
    "@codemirror/lang-java" "^6.0.0"
    "@codemirror/lang-javascript" "^6.0.0"
    "@codemirror/lang-json" "^6.0.0"
    "@codemirror/lang-less" "^6.0.0"
    "@codemirror/lang-markdown" "^6.0.0"
    "@codemirror/lang-php" "^6.0.0"
    "@codemirror/lang-python" "^6.0.0"
    "@codemirror/lang-rust" "^6.0.0"
    "@codemirror/lang-sass" "^6.0.0"
    "@codemirror/lang-sql" "^6.0.0"
    "@codemirror/lang-vue" "^0.1.1"
    "@codemirror/lang-wast" "^6.0.0"
    "@codemirror/lang-xml" "^6.0.0"
    "@codemirror/language" "^6.0.0"
    "@codemirror/legacy-modes" "^6.1.0"

"@codemirror/language@^6.0.0", "@codemirror/language@^6.3.0", "@codemirror/language@^6.4.0", "@codemirror/language@^6.6.0", "@codemirror/language@^6.8.0":
  version "6.8.0"
  resolved "https://registry.yarnpkg.com/@codemirror/language/-/language-6.8.0.tgz#f2d7eea6b338c25593d800f2293b062d9f9856db"
  integrity sha512-r1paAyWOZkfY0RaYEZj3Kul+MiQTEbDvYqf8gPGaRvNneHXCmfSaAVFjwRUPlgxS8yflMxw2CTu6uCMp8R8A2g==
@ -1404,13 +1189,6 @@
    "@lezer/lr" "^1.0.0"
    style-mod "^4.0.0"

"@codemirror/legacy-modes@^6.0.0", "@codemirror/legacy-modes@^6.1.0":
  version "6.3.3"
  resolved "https://registry.yarnpkg.com/@codemirror/legacy-modes/-/legacy-modes-6.3.3.tgz#d7827c76c9533efdc76f7d0a0fc866f5acd4b764"
  integrity sha512-X0Z48odJ0KIoh/HY8Ltz75/4tDYc9msQf1E/2trlxFaFFhgjpVHjZ/BCXe1Lk7s4Gd67LL/CeEEHNI+xHOiESg==
  dependencies:
    "@codemirror/language" "^6.0.0"

"@codemirror/lint@^6.0.0":
  version "6.4.0"
  resolved "https://registry.yarnpkg.com/@codemirror/lint/-/lint-6.4.0.tgz#3507e937aa9415ef0831ff04734ef0e736e75014"
@ -1444,7 +1222,7 @@
    "@codemirror/view" "^6.0.0"
    "@lezer/highlight" "^1.0.0"

"@codemirror/view@^6.0.0", "@codemirror/view@^6.2.2", "@codemirror/view@^6.6.0":
"@codemirror/view@^6.0.0", "@codemirror/view@^6.6.0":
  version "6.16.0"
  resolved "https://registry.yarnpkg.com/@codemirror/view/-/view-6.16.0.tgz#047001b8dd04e104776c476e45ee9c4eed9f99fa"
  integrity sha512-1Z2HkvkC3KR/oEZVuW9Ivmp8TWLzGEd8T8TA04TTwPvqogfkHBdYSlflytDOqmkUxM2d1ywTg7X2dU5mC+SXvg==
@ -1752,37 +1530,21 @@
  resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
  integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==

"@kittycad/lib@^0.0.35":
"@kittycad/lib@^0.0.36":
  version "0.0.35"
  version "0.0.36"
  resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.35.tgz#bde8868048f9fd53f8309e7308aeba622898b935"
  resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.36.tgz#7b9676c975bc629f227d41897b38e7d73280db71"
  integrity sha512-qM8AyP2QUlDfPWNxb1Fs/Pq9AebGVDN1OHjByxbGomKCy0jFdN2TsyDdhQH/CAZGfBCgPEfr5bq6rkUBGSXcNw==
  integrity sha512-4bVXTaIzpSRuJAuLbAD/CWWTns7H/IxogPj0827n8mwXDkj+65EBCNXhJGWRkMG2CeTVJVk1LSWKlaHE+ToxGA==
  dependencies:
    node-fetch "3.3.2"
    openapi-types "^12.0.0"
    ts-node "^10.9.1"
    tslib "~2.4"

"@lezer/common@^1.0.0", "@lezer/common@^1.0.2":
"@lezer/common@^1.0.0":
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/@lezer/common/-/common-1.0.3.tgz#1808f70e2b0a7b1fdcbaf5c074723d2d4ed1e4c5"
  integrity sha512-JH4wAXCgUOcCGNekQPLhVeUtIqjH0yPBs7vvUdSjyQama9618IOKFJwkv2kcqdhF0my8hQEgCTEJU0GIgnahvA==

"@lezer/cpp@^1.0.0":
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/@lezer/cpp/-/cpp-1.1.1.tgz#ac0261f48dc3651bfea13fdaeff35f04c9011a7f"
  integrity sha512-eS1M3L3U2mDowoFVPG7tEp01SWu9/68Nx3HEBgLJVn3N9ku7g5S7WdFv0jzmcTipAyONYfZJ+7x4WRkfdB2Ung==
  dependencies:
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.0.0"

"@lezer/css@^1.0.0", "@lezer/css@^1.1.0":
  version "1.1.3"
  resolved "https://registry.yarnpkg.com/@lezer/css/-/css-1.1.3.tgz#605495b00fd8a122088becf196a93744cbe817fc"
  integrity sha512-SjSM4pkQnQdJDVc80LYzEaMiNy9txsFbI7HsMgeVF28NdLaAdHNtQ+kB/QqDUzRBV/75NTXjJ/R5IdC8QQGxMg==
  dependencies:
    "@lezer/highlight" "^1.0.0"
    "@lezer/lr" "^1.0.0"

"@lezer/highlight@^1.0.0", "@lezer/highlight@^1.1.3":
  version "1.1.6"
  resolved "https://registry.yarnpkg.com/@lezer/highlight/-/highlight-1.1.6.tgz#87e56468c0f43c2a8b3dc7f0b7c2804b34901556"
@ -1790,117 +1552,21 @@
|
|||||||
dependencies:
|
dependencies:
|
||||||
"@lezer/common" "^1.0.0"
|
"@lezer/common" "^1.0.0"
|
||||||
|
|
||||||
"@lezer/html@^1.3.0":
|
"@lezer/javascript@^1.4.7":
|
||||||
version "1.3.6"
|
version "1.4.7"
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/html/-/html-1.3.6.tgz#26a2a17da4e0f91835e36db9ccd025b2ed8d33f7"
|
resolved "https://registry.yarnpkg.com/@lezer/javascript/-/javascript-1.4.7.tgz#4ebcce2db6043c07fbe827188c07cb001bc7fe37"
|
||||||
integrity sha512-Kk9HJARZTc0bAnMQUqbtuhFVsB4AnteR2BFUWfZV7L/x1H0aAKz6YabrfJ2gk/BEgjh9L3hg5O4y2IDZRBdzuQ==
|
integrity sha512-OVWlK0YEi7HM+9JRWtRkir8qvcg0/kVYg2TAMHlVtl6DU1C9yK1waEOLBMztZsV/axRJxsqfJKhzYz+bxZme5g==
|
||||||
dependencies:
|
|
||||||
"@lezer/common" "^1.0.0"
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/java@^1.0.0":
|
|
||||||
version "1.0.4"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/java/-/java-1.0.4.tgz#f31f5af4bfc40475dc886f0e3e2d291889b87d25"
|
|
||||||
integrity sha512-POc53LHf2AuNeRXjqZbXNu88GKj0KZTjjSx0L7tYeXlrEHF+3NAQx+dEwKVuCbkl0ZMtpRy2VsDYOV7KKV0oyg==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/javascript@^1.0.0":
|
|
||||||
version "1.4.5"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/javascript/-/javascript-1.4.5.tgz#4ab56dbcbff3e58ef331294a549903a5dd8d154a"
|
|
||||||
integrity sha512-FmBUHz8K1V22DgjTd6SrIG9owbzOYZ1t3rY6vGEmw+e2RVBd7sqjM8uXEVRFmfxKFn1Mx2ABJehHjrN3G2ZpmA==
|
|
||||||
dependencies:
|
dependencies:
|
||||||
"@lezer/highlight" "^1.1.3"
|
"@lezer/highlight" "^1.1.3"
|
||||||
"@lezer/lr" "^1.3.0"
|
"@lezer/lr" "^1.3.0"
|
||||||
|
|
||||||
"@lezer/json@^1.0.0":
|
"@lezer/lr@^1.0.0", "@lezer/lr@^1.3.0":
|
||||||
version "1.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/json/-/json-1.0.1.tgz#3bf5641f3d1408ec31a5f9b29e4e96c6e3a232e6"
|
|
||||||
integrity sha512-nkVC27qiEZEjySbi6gQRuMwa2sDu2PtfjSgz0A4QF81QyRGm3kb2YRzLcOPcTEtmcwvrX/cej7mlhbwViA4WJw==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/lezer@^1.0.0":
|
|
||||||
version "1.1.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/lezer/-/lezer-1.1.2.tgz#c2bf13d505ad193d9b8f6cdc1b0f9c71aa6abd98"
|
|
||||||
integrity sha512-O8yw3CxPhzYHB1hvwbdozjnAslhhR8A5BH7vfEMof0xk3p+/DFDfZkA9Tde6J+88WgtwaHy4Sy6ThZSkaI0Evw==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/lr@^1.0.0", "@lezer/lr@^1.1.0", "@lezer/lr@^1.3.0", "@lezer/lr@^1.3.1", "@lezer/lr@^1.3.3":
|
|
||||||
version "1.3.9"
|
version "1.3.9"
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/lr/-/lr-1.3.9.tgz#cb299816d1c58efcca23ebbeb70bb4204fdd001b"
|
resolved "https://registry.yarnpkg.com/@lezer/lr/-/lr-1.3.9.tgz#cb299816d1c58efcca23ebbeb70bb4204fdd001b"
|
||||||
integrity sha512-XPz6dzuTHlnsbA5M2DZgjflNQ+9Hi5Swhic0RULdp3oOs3rh6bqGZolosVqN/fQIT8uNiepzINJDnS39oweTHQ==
|
integrity sha512-XPz6dzuTHlnsbA5M2DZgjflNQ+9Hi5Swhic0RULdp3oOs3rh6bqGZolosVqN/fQIT8uNiepzINJDnS39oweTHQ==
|
||||||
dependencies:
|
dependencies:
|
||||||
"@lezer/common" "^1.0.0"
|
"@lezer/common" "^1.0.0"
|
||||||
|
|
||||||
"@lezer/markdown@^1.0.0":
|
|
||||||
version "1.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/markdown/-/markdown-1.1.0.tgz#5cee104ef353a3442ecee023ff1912826fac8658"
|
|
||||||
integrity sha512-JYOI6Lkqbl83semCANkO3CKbKc0pONwinyagBufWBm+k4yhIcqfCF8B8fpEpvJLmIy7CAfwiq7dQ/PzUZA340g==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/common" "^1.0.0"
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/php@^1.0.0":
|
|
||||||
version "1.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/php/-/php-1.0.1.tgz#4496b58c980ca710c0433fd743d27e9964fd74ea"
|
|
||||||
integrity sha512-aqdCQJOXJ66De22vzdwnuC502hIaG9EnPK2rSi+ebXyUd+j7GAX1mRjWZOVOmf3GST1YUfUCu6WXDiEgDGOVwA==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.1.0"
|
|
||||||
|
|
||||||
"@lezer/python@^1.1.4":
|
|
||||||
version "1.1.8"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/python/-/python-1.1.8.tgz#fe8d03d6cbc95a1d5625cffd30d78018ee816633"
|
|
||||||
integrity sha512-1T/XsmeF57ijrjpC0Zmrf9YeO5mn2zC1XeSNrOnc0KB+6PgxJ5m7kWKt0CnwyS74oHQXbJxUUL+QDQJR26c1Gw==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/rust@^1.0.0":
|
|
||||||
version "1.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/rust/-/rust-1.0.1.tgz#ac2d7263fe22527e621bb5623929ba6d6c3a29ea"
|
|
||||||
integrity sha512-j+ToFKM6Wpglv3OQ4ebHYdYIMT2dh0ziCCV0rTf47AWiHOVhR0WjaKrBq+yuvDQNEhr5sxPxVI7+naJIgpqcsQ==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/sass@^1.0.0":
|
|
||||||
version "1.0.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/sass/-/sass-1.0.3.tgz#17e5d27e40979bc8b4aec8d05df0d01f745aedb8"
|
|
||||||
integrity sha512-n4l2nVOB7gWiGU/Cg2IVxpt2Ic9Hgfgy/7gk+p/XJibAsPXs0lSbsfGwQgwsAw9B/euYo3oS6lEFr9WytoqcZg==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@lezer/xml@^1.0.0":
|
|
||||||
version "1.0.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/@lezer/xml/-/xml-1.0.2.tgz#5c934602d1d3565fdaf04e93b534c8b94f4df2d1"
|
|
||||||
integrity sha512-dlngsWceOtQBMuBPw5wtHpaxdPJ71aVntqjbpGkFtWsp4WtQmCnuTjQGocviymydN6M18fhj6UQX3oiEtSuY7w==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@nextjournal/lang-clojure@^1.0.0":
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/@nextjournal/lang-clojure/-/lang-clojure-1.0.0.tgz#0efbd594769e606eea532758519a239f0d38959d"
|
|
||||||
integrity sha512-gOCV71XrYD0DhwGoPMWZmZ0r92/lIHsqQu9QWdpZYYBwiChNwMO4sbVMP7eTuAqffFB2BTtCSC+1skSH9d3bNg==
|
|
||||||
dependencies:
|
|
||||||
"@codemirror/language" "^6.0.0"
|
|
||||||
"@nextjournal/lezer-clojure" "1.0.0"
|
|
||||||
|
|
||||||
"@nextjournal/lezer-clojure@1.0.0":
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/@nextjournal/lezer-clojure/-/lezer-clojure-1.0.0.tgz#0e7ff75f8d0fabed36d26b9f6b5f00d8a9f385e6"
|
|
||||||
integrity sha512-VZyuGu4zw5mkTOwQBTaGVNWmsOZAPw5ZRxu1/Knk/Xfs7EDBIogwIs5UXTYkuECX5ZQB8eOB+wKA2pc7VyqaZQ==
|
|
||||||
dependencies:
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1":
|
"@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1":
|
||||||
version "5.1.1-v1"
|
version "5.1.1-v1"
|
||||||
resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129"
|
resolved "https://registry.yarnpkg.com/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz#dbf733a965ca47b1973177dc0bb6c889edcfb129"
|
||||||
@ -1929,6 +1595,16 @@
|
|||||||
"@nodelib/fs.scandir" "2.1.5"
|
"@nodelib/fs.scandir" "2.1.5"
|
||||||
fastq "^1.6.0"
|
fastq "^1.6.0"
|
||||||
|
|
||||||
|
"@open-rpc/client-js@^1.8.1":
|
||||||
|
version "1.8.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/@open-rpc/client-js/-/client-js-1.8.1.tgz#73b5a5bf237f24b14c3c89205b1fca3aea213213"
|
||||||
|
integrity sha512-vV+Hetl688nY/oWI9IFY0iKDrWuLdYhf7OIKI6U1DcnJV7r4gAgwRJjEr1QVYszUc0gjkHoQJzqevmXMGLyA0g==
|
||||||
|
dependencies:
|
||||||
|
isomorphic-fetch "^3.0.0"
|
||||||
|
isomorphic-ws "^5.0.0"
|
||||||
|
strict-event-emitter-types "^2.0.0"
|
||||||
|
ws "^7.0.0"
|
||||||
|
|
||||||
"@react-hook/latest@^1.0.2":
|
"@react-hook/latest@^1.0.2":
|
||||||
version "1.0.3"
|
version "1.0.3"
|
||||||
resolved "https://registry.yarnpkg.com/@react-hook/latest/-/latest-1.0.3.tgz#c2d1d0b0af8b69ec6e2b3a2412ba0768ac82db80"
|
resolved "https://registry.yarnpkg.com/@react-hook/latest/-/latest-1.0.3.tgz#c2d1d0b0af8b69ec6e2b3a2412ba0768ac82db80"
|
||||||
@ -1953,26 +1629,6 @@
|
|||||||
resolved "https://registry.yarnpkg.com/@remix-run/router/-/router-1.7.2.tgz#cba1cf0a04bc04cb66027c51fa600e9cbc388bc8"
|
resolved "https://registry.yarnpkg.com/@remix-run/router/-/router-1.7.2.tgz#cba1cf0a04bc04cb66027c51fa600e9cbc388bc8"
|
||||||
integrity sha512-7Lcn7IqGMV+vizMPoEl5F0XDshcdDYtMI6uJLQdQz5CfZAwy3vvGKYSUk789qndt5dEC4HfSjviSYlSoHGL2+A==
|
integrity sha512-7Lcn7IqGMV+vizMPoEl5F0XDshcdDYtMI6uJLQdQz5CfZAwy3vvGKYSUk789qndt5dEC4HfSjviSYlSoHGL2+A==
|
||||||
|
|
||||||
"@replit/codemirror-lang-csharp@^6.1.0":
|
|
||||||
version "6.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/@replit/codemirror-lang-csharp/-/codemirror-lang-csharp-6.1.0.tgz#3f3087fe0938f35fcf2012357f364d22755508c7"
|
|
||||||
integrity sha512-Dtyk9WVrdPPgkgTp8MUX9HyXd87O7UZnFrE647gjHUZY8p0UN+z0m6dPfk6rJMsTTvMcl7YbDUykxfeqB6EQOQ==
|
|
||||||
|
|
||||||
"@replit/codemirror-lang-nix@^6.0.1":
|
|
||||||
version "6.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/@replit/codemirror-lang-nix/-/codemirror-lang-nix-6.0.1.tgz#d87af4ce9eb2cf30fdd64c9be0cb576783331217"
|
|
||||||
integrity sha512-lvzjoYn9nfJzBD5qdm3Ut6G3+Or2wEacYIDJ49h9+19WSChVnxv4ojf+rNmQ78ncuxIt/bfbMvDLMeMP0xze6g==
|
|
||||||
|
|
||||||
"@replit/codemirror-lang-solidity@^6.0.1":
|
|
||||||
version "6.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/@replit/codemirror-lang-solidity/-/codemirror-lang-solidity-6.0.1.tgz#c7e5ace087f9fa1a2c55b5b62f6bd0b064706a71"
|
|
||||||
integrity sha512-kDnak0xZelGmvzJwKTpMTl6gYSfFq9hnxrkbLaMV0CARq/MFvDQJmcmYon/k8uZqXy6DfzewKDV8tx9kY2WUZg==
|
|
||||||
|
|
||||||
"@replit/codemirror-lang-svelte@^6.0.0":
|
|
||||||
version "6.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/@replit/codemirror-lang-svelte/-/codemirror-lang-svelte-6.0.0.tgz#a9d36a2c762280db66809190f0d68fa43befe0d9"
|
|
||||||
integrity sha512-U2OqqgMM6jKelL0GNWbAmqlu1S078zZNoBqlJBW+retTc5M4Mha6/Y2cf4SVg6ddgloJvmcSpt4hHrVoM4ePRA==
|
|
||||||
|
|
||||||
"@rollup/pluginutils@^4.2.1":
|
"@rollup/pluginutils@^4.2.1":
|
||||||
version "4.2.1"
|
version "4.2.1"
|
||||||
resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-4.2.1.tgz#e6c6c3aba0744edce3fb2074922d3776c0af2a6d"
|
resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-4.2.1.tgz#e6c6c3aba0744edce3fb2074922d3776c0af2a6d"
|
||||||
@ -2171,6 +1827,13 @@
|
|||||||
dependencies:
|
dependencies:
|
||||||
"@babel/runtime" "^7.12.5"
|
"@babel/runtime" "^7.12.5"
|
||||||
|
|
||||||
|
"@ts-stack/markdown@^1.5.0":
|
||||||
|
version "1.5.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/@ts-stack/markdown/-/markdown-1.5.0.tgz#5dc298a20dc3dc040143c5a5948201eb6bf5419d"
|
||||||
|
integrity sha512-ntVX2Kmb2jyTdH94plJohokvDVPvp6CwXHqsa9NVZTK8cOmHDCYNW0j6thIadUVRTStJhxhfdeovLd0owqDxLw==
|
||||||
|
dependencies:
|
||||||
|
tslib "^2.3.0"
|
||||||
|
|
||||||
"@tsconfig/node10@^1.0.7":
|
"@tsconfig/node10@^1.0.7":
|
||||||
version "1.0.9"
|
version "1.0.9"
|
||||||
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2"
|
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2"
|
||||||
@ -2213,6 +1876,11 @@
|
|||||||
resolved "https://registry.yarnpkg.com/@types/crypto-js/-/crypto-js-4.1.1.tgz#602859584cecc91894eb23a4892f38cfa927890d"
|
resolved "https://registry.yarnpkg.com/@types/crypto-js/-/crypto-js-4.1.1.tgz#602859584cecc91894eb23a4892f38cfa927890d"
|
||||||
integrity sha512-BG7fQKZ689HIoc5h+6D2Dgq1fABRa0RbBWKBd9SP/MVRVXROflpm5fhwyATX5duFmbStzyzyycPB8qUYKDH3NA==
|
integrity sha512-BG7fQKZ689HIoc5h+6D2Dgq1fABRa0RbBWKBd9SP/MVRVXROflpm5fhwyATX5duFmbStzyzyycPB8qUYKDH3NA==
|
||||||
|
|
||||||
|
"@types/debounce@^1.2.1":
|
||||||
|
version "1.2.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/@types/debounce/-/debounce-1.2.1.tgz#79b65710bc8b6d44094d286aecf38e44f9627852"
|
||||||
|
integrity sha512-epMsEE85fi4lfmJUH/89/iV/LI+F5CvNIvmgs5g5jYFPfhO2S/ae8WSsLOKWdwtoaZw9Q2IhJ4tQ5tFCcS/4HA==
|
||||||
|
|
||||||
"@types/eslint@^8.4.5":
|
"@types/eslint@^8.4.5":
|
||||||
version "8.44.1"
|
version "8.44.1"
|
||||||
resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.1.tgz#d1811559bb6bcd1a76009e3f7883034b78a0415e"
|
resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.1.tgz#d1811559bb6bcd1a76009e3f7883034b78a0415e"
|
||||||
@ -2453,10 +2121,10 @@
|
|||||||
"@typescript-eslint/types" "5.62.0"
|
"@typescript-eslint/types" "5.62.0"
|
||||||
eslint-visitor-keys "^3.3.0"
|
eslint-visitor-keys "^3.3.0"
|
||||||
|
|
||||||
"@uiw/codemirror-extensions-basic-setup@4.21.9":
|
"@uiw/codemirror-extensions-basic-setup@4.21.13":
|
||||||
version "4.21.9"
|
version "4.21.13"
|
||||||
resolved "https://registry.yarnpkg.com/@uiw/codemirror-extensions-basic-setup/-/codemirror-extensions-basic-setup-4.21.9.tgz#e886c6e6ad477bc0943691b9572958c81a2beab3"
|
resolved "https://registry.yarnpkg.com/@uiw/codemirror-extensions-basic-setup/-/codemirror-extensions-basic-setup-4.21.13.tgz#d7bcebf1906157bafde2d097dd6b63bcc772f54c"
|
||||||
integrity sha512-TQT6aF8brxZpFnk/K4fm/K/9k9eF3PMav/KKjHlYrGUT8BTNk/qL+ximLtIzvTUhmBFchjM1lrqSJdvpVom7/w==
|
integrity sha512-5ObHaBqPV00xBVleDFehzPfOQvek5dPM7YLdPHJUE9bumeSflIWJb55n0Zg/w1rsuU0Lt/Q6WJUh4X6VGR1FVw==
|
||||||
dependencies:
|
dependencies:
|
||||||
"@codemirror/autocomplete" "^6.0.0"
|
"@codemirror/autocomplete" "^6.0.0"
|
||||||
"@codemirror/commands" "^6.0.0"
|
"@codemirror/commands" "^6.0.0"
|
||||||
@ -2466,48 +2134,16 @@
|
|||||||
"@codemirror/state" "^6.0.0"
|
"@codemirror/state" "^6.0.0"
|
||||||
"@codemirror/view" "^6.0.0"
|
"@codemirror/view" "^6.0.0"
|
||||||
|
|
||||||
"@uiw/codemirror-extensions-langs@^4.21.9":
|
"@uiw/react-codemirror@^4.21.13":
|
||||||
version "4.21.9"
|
version "4.21.13"
|
||||||
resolved "https://registry.yarnpkg.com/@uiw/codemirror-extensions-langs/-/codemirror-extensions-langs-4.21.9.tgz#0cb18bb1a15ce272c8aa9613dc0b11d84eaefacb"
|
resolved "https://registry.yarnpkg.com/@uiw/react-codemirror/-/react-codemirror-4.21.13.tgz#b6e44cbccef70c1ff13bc905b46edc5bc3363dcc"
|
||||||
integrity sha512-s1VT1rss0iyvrtRl7BZtC5H7U5uQtCKTaD8wxjQrgZz5un9wHVvy9twU97aJGQR0FwbKWqK8/1iiICRJTRCoZA==
|
integrity sha512-kNX8jLeoDrF2CDa5lsey0MXjBXN3JP00z6AQTTP58mHvlE7Rf03QJSs7bNwwco+3kpwREifFJjnwRe+Y3Gmwtw==
|
||||||
dependencies:
|
|
||||||
"@codemirror/lang-angular" "^0.1.0"
|
|
||||||
"@codemirror/lang-cpp" "^6.0.0"
|
|
||||||
"@codemirror/lang-css" "^6.2.0"
|
|
||||||
"@codemirror/lang-html" "^6.4.0"
|
|
||||||
"@codemirror/lang-java" "^6.0.0"
|
|
||||||
"@codemirror/lang-javascript" "^6.1.0"
|
|
||||||
"@codemirror/lang-json" "^6.0.0"
|
|
||||||
"@codemirror/lang-less" "^6.0.1"
|
|
||||||
"@codemirror/lang-lezer" "^6.0.0"
|
|
||||||
"@codemirror/lang-markdown" "^6.1.0"
|
|
||||||
"@codemirror/lang-php" "^6.0.0"
|
|
||||||
"@codemirror/lang-python" "^6.1.0"
|
|
||||||
"@codemirror/lang-rust" "^6.0.0"
|
|
||||||
"@codemirror/lang-sass" "^6.0.1"
|
|
||||||
"@codemirror/lang-sql" "^6.4.0"
|
|
||||||
"@codemirror/lang-vue" "^0.1.1"
|
|
||||||
"@codemirror/lang-wast" "^6.0.0"
|
|
||||||
"@codemirror/lang-xml" "^6.0.0"
|
|
||||||
"@codemirror/language-data" "^6.0.0"
|
|
||||||
"@codemirror/legacy-modes" "^6.0.0"
|
|
||||||
"@nextjournal/lang-clojure" "^1.0.0"
|
|
||||||
"@replit/codemirror-lang-csharp" "^6.1.0"
|
|
||||||
"@replit/codemirror-lang-nix" "^6.0.1"
|
|
||||||
"@replit/codemirror-lang-solidity" "^6.0.1"
|
|
||||||
"@replit/codemirror-lang-svelte" "^6.0.0"
|
|
||||||
codemirror-lang-mermaid "^0.2.1"
|
|
||||||
|
|
||||||
"@uiw/react-codemirror@^4.15.1":
|
|
||||||
version "4.21.9"
|
|
||||||
resolved "https://registry.yarnpkg.com/@uiw/react-codemirror/-/react-codemirror-4.21.9.tgz#74393955d159a7d452731e61957773ae053c65b8"
|
|
||||||
integrity sha512-aeLegPz2iCvqJjhzXp2WUMqpMZDqxsTnF3rX9kGRlfY6vQLsrjoctj0cQ29uxEtFYJChOVjtCOtnQUlyIuNAHQ==
|
|
||||||
dependencies:
|
dependencies:
|
||||||
"@babel/runtime" "^7.18.6"
|
"@babel/runtime" "^7.18.6"
|
||||||
"@codemirror/commands" "^6.1.0"
|
"@codemirror/commands" "^6.1.0"
|
||||||
"@codemirror/state" "^6.1.1"
|
"@codemirror/state" "^6.1.1"
|
||||||
"@codemirror/theme-one-dark" "^6.0.0"
|
"@codemirror/theme-one-dark" "^6.0.0"
|
||||||
"@uiw/codemirror-extensions-basic-setup" "4.21.9"
|
"@uiw/codemirror-extensions-basic-setup" "4.21.13"
|
||||||
codemirror "^6.0.0"
|
codemirror "^6.0.0"
|
||||||
|
|
||||||
"@vitejs/plugin-react@^4.0.3":
|
"@vitejs/plugin-react@^4.0.3":
|
||||||
@ -3027,15 +2663,6 @@ client-only@^0.0.1:
|
|||||||
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
|
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
|
||||||
integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==
|
integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==
|
||||||
|
|
||||||
codemirror-lang-mermaid@^0.2.1:
|
|
||||||
version "0.2.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/codemirror-lang-mermaid/-/codemirror-lang-mermaid-0.2.2.tgz#f7f6622c08f6ac459a7ce11632f9b5097b3da106"
|
|
||||||
integrity sha512-AqSzkQgfWsjBbifio3dy/zDj6WXEw4g52Mq6bltIWLMWryWWRMpFwjQSlHtCGOol1FENYObUF5KI4ofiv8bjXA==
|
|
||||||
dependencies:
|
|
||||||
"@codemirror/language" "^6.0.0"
|
|
||||||
"@lezer/highlight" "^1.0.0"
|
|
||||||
"@lezer/lr" "^1.0.0"
|
|
||||||
|
|
||||||
codemirror@^6.0.0:
|
codemirror@^6.0.0:
|
||||||
version "6.0.1"
|
version "6.0.1"
|
||||||
resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-6.0.1.tgz#62b91142d45904547ee3e0e0e4c1a79158035a29"
|
resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-6.0.1.tgz#62b91142d45904547ee3e0e0e4c1a79158035a29"
|
||||||
@ -4372,6 +3999,19 @@ isexe@^2.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||||
integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
|
integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
|
||||||
|
|
||||||
|
isomorphic-fetch@^3.0.0:
|
||||||
|
version "3.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4"
|
||||||
|
integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==
|
||||||
|
dependencies:
|
||||||
|
node-fetch "^2.6.1"
|
||||||
|
whatwg-fetch "^3.4.1"
|
||||||
|
|
||||||
|
isomorphic-ws@^5.0.0:
|
||||||
|
version "5.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf"
|
||||||
|
integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==
|
||||||
|
|
||||||
istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0:
|
istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0:
|
||||||
version "3.2.0"
|
version "3.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3"
|
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3"
|
||||||
@ -4498,6 +4138,11 @@ json-parse-even-better-errors@^2.3.0:
|
|||||||
resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
|
resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
|
||||||
integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
|
integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
|
||||||
|
|
||||||
|
json-rpc-2.0@^1.6.0:
|
||||||
|
version "1.6.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/json-rpc-2.0/-/json-rpc-2.0-1.6.0.tgz#60770ca98f663376126af7335ed2d30164691c89"
|
||||||
|
integrity sha512-+pKxaoIqnA5VjXmZiAI1+CkFG7mHLg+dhtliOe/mp1P5Gdn8P5kE/Xxp2CUBwnGL7pfw6gC8zWTWekhSnKzHFA==
|
||||||
|
|
||||||
json-schema-traverse@^0.4.1:
|
json-schema-traverse@^0.4.1:
|
||||||
version "0.4.1"
|
version "0.4.1"
|
||||||
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
|
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
|
||||||
@ -4784,6 +4429,13 @@ node-fetch@3.3.2:
|
|||||||
fetch-blob "^3.1.4"
|
fetch-blob "^3.1.4"
|
||||||
formdata-polyfill "^4.0.10"
|
formdata-polyfill "^4.0.10"
|
||||||
|
|
||||||
|
node-fetch@^2.6.1:
|
||||||
|
version "2.7.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
|
||||||
|
integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
|
||||||
|
dependencies:
|
||||||
|
whatwg-url "^5.0.0"
|
||||||
|
|
||||||
node-fetch@^2.6.12:
|
node-fetch@^2.6.12:
|
||||||
version "2.6.12"
|
version "2.6.12"
|
||||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.12.tgz#02eb8e22074018e3d5a83016649d04df0e348fba"
|
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.12.tgz#02eb8e22074018e3d5a83016649d04df0e348fba"
|
||||||
@ -5541,6 +5193,11 @@ stop-iteration-iterator@^1.0.0:
|
|||||||
dependencies:
|
dependencies:
|
||||||
internal-slot "^1.0.4"
|
internal-slot "^1.0.4"
|
||||||
|
|
||||||
|
strict-event-emitter-types@^2.0.0:
|
||||||
|
version "2.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz#05e15549cb4da1694478a53543e4e2f4abcf277f"
|
||||||
|
integrity sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==
|
||||||
|
|
||||||
string-natural-compare@^3.0.1:
|
string-natural-compare@^3.0.1:
|
||||||
version "3.0.1"
|
version "3.0.1"
|
||||||
resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4"
|
resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4"
|
||||||
@ -5827,7 +5484,7 @@ tslib@^2.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.1.tgz#fd8c9a0ff42590b25703c0acb3de3d3f4ede0410"
|
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.1.tgz#fd8c9a0ff42590b25703c0acb3de3d3f4ede0410"
|
||||||
integrity sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==
|
integrity sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==
|
||||||
|
|
||||||
"tslib@^2.4.1 || ^1.9.3":
|
tslib@^2.3.0, "tslib@^2.4.1 || ^1.9.3":
|
||||||
version "2.6.2"
|
version "2.6.2"
|
||||||
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
|
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
|
||||||
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
|
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
|
||||||
@ -6083,6 +5740,24 @@ vitest@^0.34.1:
|
|||||||
vite-node "0.34.1"
|
vite-node "0.34.1"
|
||||||
why-is-node-running "^2.2.2"
|
why-is-node-running "^2.2.2"
|
||||||
|
|
||||||
|
vscode-jsonrpc@8.1.0, vscode-jsonrpc@^8.1.0:
|
||||||
|
version "8.1.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/vscode-jsonrpc/-/vscode-jsonrpc-8.1.0.tgz#cb9989c65e219e18533cc38e767611272d274c94"
|
||||||
|
integrity sha512-6TDy/abTQk+zDGYazgbIPc+4JoXdwC8NHU9Pbn4UJP1fehUyZmM4RHp5IthX7A6L5KS30PRui+j+tbbMMMafdw==
|
||||||
|
|
||||||
|
vscode-languageserver-protocol@^3.17.3:
|
||||||
|
version "3.17.3"
|
||||||
|
resolved "https://registry.yarnpkg.com/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz#6d0d54da093f0c0ee3060b81612cce0f11060d57"
|
||||||
|
integrity sha512-924/h0AqsMtA5yK22GgMtCYiMdCOtWTSGgUOkgEDX+wk2b0x4sAfLiO4NxBxqbiVtz7K7/1/RgVrVI0NClZwqA==
|
||||||
|
dependencies:
|
||||||
|
vscode-jsonrpc "8.1.0"
|
||||||
|
vscode-languageserver-types "3.17.3"
|
||||||
|
|
||||||
|
vscode-languageserver-types@3.17.3:
|
||||||
|
version "3.17.3"
|
||||||
|
resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.17.3.tgz#72d05e47b73be93acb84d6e311b5786390f13f64"
|
||||||
|
integrity sha512-SYU4z1dL0PyIMd4Vj8YOqFvHu7Hz/enbWtpfnVbJHU4Nd1YNYx8u0ennumc6h48GQNeOLxmwySmnADouT/AuZA==
|
||||||
|
|
||||||
w3c-keyname@^2.2.4:
|
w3c-keyname@^2.2.4:
|
||||||
version "2.2.8"
|
version "2.2.8"
|
||||||
resolved "https://registry.yarnpkg.com/w3c-keyname/-/w3c-keyname-2.2.8.tgz#7b17c8c6883d4e8b86ac8aba79d39e880f8869c5"
|
resolved "https://registry.yarnpkg.com/w3c-keyname/-/w3c-keyname-2.2.8.tgz#7b17c8c6883d4e8b86ac8aba79d39e880f8869c5"
|
||||||
@ -6129,6 +5804,11 @@ whatwg-encoding@^2.0.0:
|
|||||||
dependencies:
|
dependencies:
|
||||||
iconv-lite "0.6.3"
|
iconv-lite "0.6.3"
|
||||||
|
|
||||||
|
whatwg-fetch@^3.4.1:
|
||||||
|
version "3.6.18"
|
||||||
|
resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.18.tgz#2f640cdee315abced7daeaed2309abd1e44e62d4"
|
||||||
|
integrity sha512-ltN7j66EneWn5TFDO4L9inYC1D+Czsxlrw2SalgjMmEMkLfA5SIZxEFdE6QtHFiiM6Q7WL32c7AkI3w6yxM84Q==
|
||||||
|
|
||||||
whatwg-mimetype@^3.0.0:
|
whatwg-mimetype@^3.0.0:
|
||||||
version "3.0.0"
|
version "3.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz#5fa1a7623867ff1af6ca3dc72ad6b8a4208beba7"
|
resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz#5fa1a7623867ff1af6ca3dc72ad6b8a4208beba7"
|
||||||
@ -6194,6 +5874,11 @@ wrappy@1:
|
|||||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||||
integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
|
integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
|
||||||
|
|
||||||
|
ws@^7.0.0:
|
||||||
|
version "7.5.9"
|
||||||
|
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
|
||||||
|
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
|
||||||
|
|
||||||
ws@^8.13.0:
|
ws@^8.13.0:
|
||||||
version "8.13.0"
|
version "8.13.0"
|
||||||
resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
|
resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
|
||||||