side quests for lsp server (#2119)

* all

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* more

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* side quests only

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* A snapshot a day keeps the bugs away! 📷🐛 (OS: ubuntu)

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Commit 9a9c2223de (parent 6d12aa48f8)
Authored by Jess Frazelle on 2024-04-15 17:18:32 -07:00; committed by GitHub
76 changed files with 3334 additions and 900 deletions


@ -1105,7 +1105,7 @@ test('ProgramMemory can be serialised', async ({ page }) => {
|> line([0, -1], %)
|> close(%)
|> extrude(1, %)
|> patternLinear({
|> patternLinear3d({
axis: [1, 0, 1],
repetitions: 3,
distance: 6

(Seven binary snapshot images changed, not shown; each baseline shrank slightly, e.g. 40 KiB to 38 KiB and 73 KiB to 68 KiB.)


@ -3,7 +3,9 @@ import { engineCommandManager } from 'lib/singletons'
import { useState, useEffect } from 'react'
function useEngineCommands(): [CommandLog[], () => void] {
const [engineCommands, setEngineCommands] = useState<CommandLog[]>([])
const [engineCommands, setEngineCommands] = useState<CommandLog[]>(
engineCommandManager.commandLogs
)
useEffect(() => {
engineCommandManager.registerCommandLogCallback((commands) =>

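Note: the hunk above seeds the hook's state from the manager's existing log instead of an empty array, so a pane mounted after commands have already run still shows them. A minimal sketch of the resulting hook, assuming the CommandLog type's import path and using the registerCommandLogCallback / clearCommandLogs API shown later in this diff:

import { engineCommandManager } from 'lib/singletons'
import { useState, useEffect } from 'react'
import type { CommandLog } from 'lang/std/engineConnection' // assumed export location

function useEngineCommands(): [CommandLog[], () => void] {
  // Seed from whatever has already been logged before this hook mounted.
  const [engineCommands, setEngineCommands] = useState<CommandLog[]>(
    engineCommandManager.commandLogs
  )

  useEffect(() => {
    // Stay in sync with commands logged after mount.
    engineCommandManager.registerCommandLogCallback((commands) =>
      setEngineCommands(commands)
    )
  }, [])

  return [engineCommands, () => engineCommandManager.clearCommandLogs()]
}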

@ -1,6 +1,6 @@
import { LanguageServerClient } from 'editor/plugins/lsp'
import type * as LSP from 'vscode-languageserver-protocol'
import React, { createContext, useMemo, useContext } from 'react'
import React, { createContext, useMemo, useEffect, useContext } from 'react'
import { FromServer, IntoServer } from 'editor/plugins/lsp/codec'
import Server from '../editor/plugins/lsp/server'
import Client from '../editor/plugins/lsp/client'
@ -14,6 +14,7 @@ import { LanguageSupport } from '@codemirror/language'
import { useNavigate } from 'react-router-dom'
import { paths } from 'lib/paths'
import { FileEntry } from 'lib/types'
import { NetworkHealthState, useNetworkStatus } from './NetworkHealthIndicator'
const DEFAULT_FILE_NAME: string = 'main.kcl'
@ -60,16 +61,27 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
isCopilotLspServerReady,
setIsKclLspServerReady,
setIsCopilotLspServerReady,
isStreamReady,
} = useStore((s) => ({
isKclLspServerReady: s.isKclLspServerReady,
isCopilotLspServerReady: s.isCopilotLspServerReady,
setIsKclLspServerReady: s.setIsKclLspServerReady,
setIsCopilotLspServerReady: s.setIsCopilotLspServerReady,
isStreamReady: s.isStreamReady,
}))
const { auth } = useSettingsAuthContext()
const {
auth,
settings: {
context: {
modeling: { defaultUnit },
},
},
} = useSettingsAuthContext()
const token = auth?.context?.token
const navigate = useNavigate()
const { overallState } = useNetworkStatus()
const isNetworkOkay = overallState === NetworkHealthState.Ok
// So this is a bit weird, we need to initialize the lsp server and client.
// But the server happens async so we break this into two parts.
@ -87,7 +99,11 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
const lspClient = new LanguageServerClient({ client, name: 'kcl' })
return { lspClient }
}, [setIsKclLspServerReady, token])
}, [
setIsKclLspServerReady,
// We need a token for authenticating the server.
token,
])
// Here we initialize the plugin which will start the client.
// Now that we have multi-file support the name of the file is a dep of
@ -109,6 +125,25 @@ export const LspProvider = ({ children }: { children: React.ReactNode }) => {
return plugin
}, [kclLspClient, isKclLspServerReady])
// Re-execute the scene when the units change.
useEffect(() => {
let plugins = kclLspClient.plugins
for (let plugin of plugins) {
if (plugin.updateUnits && isStreamReady && isNetworkOkay) {
plugin.updateUnits(defaultUnit.current)
}
}
}, [
kclLspClient,
defaultUnit.current,
// We want to re-execute the scene if the network comes back online.
// The lsp server will only re-execute if there were previous errors or
// changes, so it's fine to send it thru here.
isStreamReady,
isNetworkOkay,
])
const { lspClient: copilotLspClient } = useMemo(() => {
const intoServer: IntoServer = new IntoServer()
const fromServer: FromServer = FromServer.create()

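Note: the units effect above fans out to each plugin's updateUnits, which ultimately issues a custom kcl/updateUnits LSP request. A rough sketch of that request through the LanguageServerClient wrapper, using the parameter shape (document URI, full document text, new base unit) added in the plugin further down; the helper name and URI are illustrative:

import { LanguageServerClient } from 'editor/plugins/lsp'
import type { UnitLength } from 'wasm-lib/kcl/bindings/UnitLength'
import type { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'

// Hypothetical helper: push a unit change for one open document.
async function pushUnitChange(
  client: LanguageServerClient,
  uri: string,
  text: string,
  units: UnitLength
): Promise<UpdateUnitsResponse | null> {
  // The server only re-executes when something actually changed, so calling
  // this again (e.g. when the network comes back) is cheap.
  return client.updateUnits({
    textDocument: { uri },
    text,
    units,
  })
}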

@ -1,10 +1,5 @@
import { undo, redo } from '@codemirror/commands'
import ReactCodeMirror, {
Extension,
ViewUpdate,
SelectionRange,
drawSelection,
} from '@uiw/react-codemirror'
import ReactCodeMirror from '@uiw/react-codemirror'
import { TEST } from 'env'
import { useCommandsContext } from 'hooks/useCommandsContext'
import { useSettingsAuthContext } from 'hooks/useSettingsAuthContext'
@ -18,11 +13,15 @@ import { lineHighlightField } from 'editor/highlightextension'
import { roundOff } from 'lib/utils'
import {
lineNumbers,
rectangularSelection,
highlightActiveLineGutter,
highlightSpecialChars,
highlightActiveLine,
keymap,
EditorView,
dropCursor,
drawSelection,
ViewUpdate,
} from '@codemirror/view'
import {
indentWithTab,
@ -31,39 +30,41 @@ import {
history,
} from '@codemirror/commands'
import { lintGutter, lintKeymap, linter } from '@codemirror/lint'
import { kclErrToDiagnostic } from 'lang/errors'
import {
foldGutter,
foldKeymap,
bracketMatching,
indentOnInput,
codeFolding,
syntaxHighlighting,
defaultHighlightStyle,
} from '@codemirror/language'
import { useModelingContext } from 'hooks/useModelingContext'
import interact from '@replit/codemirror-interact'
import { engineCommandManager, sceneInfra, kclManager } from 'lib/singletons'
import { useKclContext } from 'lang/KclProvider'
import { ModelingMachineEvent } from 'machines/modelingMachine'
import {
NetworkHealthState,
useNetworkStatus,
} from 'components/NetworkHealthIndicator'
import { useHotkeys } from 'react-hotkeys-hook'
import { isTauri } from 'lib/isTauri'
import { useNavigate } from 'react-router-dom'
import { paths } from 'lib/paths'
import makeUrlPathRelative from 'lib/makeUrlPathRelative'
import { useLspContext } from 'components/LspProvider'
import { Prec, EditorState } from '@codemirror/state'
import { Prec, EditorState, Extension, SelectionRange } from '@codemirror/state'
import {
closeBrackets,
closeBracketsKeymap,
completionKeymap,
hasNextSnippetField,
} from '@codemirror/autocomplete'
import {
NetworkHealthState,
useNetworkStatus,
} from 'components/NetworkHealthIndicator'
import { kclErrorsToDiagnostics } from 'lang/errors'
export const editorShortcutMeta = {
formatCode: {
codeMirror: 'Alt-Shift-f',
display: 'Alt + Shift + F',
},
convertToVariable: {
@ -87,9 +88,9 @@ export const KclEditorPane = () => {
}))
const { code, errors } = useKclContext()
const lastEvent = useRef({ event: '', time: Date.now() })
const { copilotLSP, kclLSP } = useLspContext()
const { overallState } = useNetworkStatus()
const isNetworkOkay = overallState === NetworkHealthState.Ok
const { copilotLSP, kclLSP } = useLspContext()
const navigate = useNavigate()
useEffect(() => {
@ -125,7 +126,6 @@ export const KclEditorPane = () => {
const { enable: convertEnabled, handleClick: convertCallback } =
useConvertToVariable()
// const onChange = React.useCallback((value: string, viewUpdate: ViewUpdate) => {
const onChange = async (newCode: string) => {
// If we are just fucking around in a snippet, return early and don't
// trigger stuff below that might cause the component to re-render.
@ -135,9 +135,10 @@ export const KclEditorPane = () => {
if (editorView && hasNextSnippetField(editorView.state)) {
return
}
if (isNetworkOkay) kclManager.setCodeAndExecute(newCode)
else kclManager.setCode(newCode)
} //, []);
}
const lastSelection = useRef('')
const onUpdate = (viewUpdate: ViewUpdate) => {
// If we are just fucking around in a snippet, return early and don't
@ -207,6 +208,7 @@ export const KclEditorPane = () => {
lineHighlightField,
history(),
closeBrackets(),
codeFolding(),
keymap.of([
...closeBracketsKeymap,
...defaultKeymap,
@ -230,13 +232,6 @@ export const KclEditorPane = () => {
return false
},
},
{
key: editorShortcutMeta.formatCode.codeMirror,
run: () => {
kclManager.format()
return true
},
},
{
key: editorShortcutMeta.convertToVariable.codeMirror,
run: () => {
@ -257,6 +252,9 @@ export const KclEditorPane = () => {
if (!TEST) {
extensions.push(
lintGutter(),
linter((_view: EditorView) => {
return kclErrorsToDiagnostics(errors)
}),
lineNumbers(),
highlightActiveLineGutter(),
highlightSpecialChars(),
@ -268,10 +266,10 @@ export const KclEditorPane = () => {
closeBrackets(),
highlightActiveLine(),
highlightSelectionMatches(),
lintGutter(),
linter((_view) => {
return kclErrToDiagnostic(errors)
}),
syntaxHighlighting(defaultHighlightStyle, { fallback: true }),
rectangularSelection(),
drawSelection(),
dropCursor(),
interact({
rules: [
// a rule for a number dragger
@ -324,6 +322,7 @@ export const KclEditorPane = () => {
theme={theme}
onCreateEditor={(_editorView) => setEditorView(_editorView)}
indentWithTab={false}
basicSetup={false}
/>
</div>
)

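Note: with basicSetup={false} the pane now assembles its extension list by hand, including a linter fed by kclErrorsToDiagnostics. A stripped-down sketch of the same wiring outside React; the document text and error are illustrative:

import { EditorState } from '@codemirror/state'
import { EditorView, lineNumbers } from '@codemirror/view'
import { lintGutter, linter } from '@codemirror/lint'
import { kclErrorsToDiagnostics, KCLError } from 'lang/errors'

// Pretend these came from the last KCL execution.
const errors: KCLError[] = [
  new KCLError('unexpected', 'example error for illustration', [[0, 4]]),
]

const state = EditorState.create({
  doc: 'const x = 1',
  extensions: [
    lineNumbers(),
    lintGutter(),
    // Same shape as the pane above: map KCL errors to CodeMirror diagnostics.
    linter(() => kclErrorsToDiagnostics(errors)),
  ],
})

new EditorView({ state, parent: document.body })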

@ -77,7 +77,4 @@ export function applyConstraintEqualLength({
programMemory: kclManager.programMemory,
})
return { modifiedAst, pathToNodeMap }
// kclManager.updateAst(modifiedAst, true, {
// // callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
// })
}


@ -16,7 +16,7 @@ export const lineHighlightField = StateField.define({
if (e.is(addLineHighlight)) {
lines = Decoration.none
const [from, to] = e.value || [0, 0]
if (!(from === to && from === 0)) {
if (from && to && !(from === to && from === 0)) {
lines = lines.update({ add: [matchDeco.range(from, to)] })
deco.push(matchDeco.range(from, to))
}


@ -6,6 +6,10 @@ import { CopilotLspCompletionParams } from 'wasm-lib/kcl/bindings/CopilotLspComp
import { CopilotCompletionResponse } from 'wasm-lib/kcl/bindings/CopilotCompletionResponse'
import { CopilotAcceptCompletionParams } from 'wasm-lib/kcl/bindings/CopilotAcceptCompletionParams'
import { CopilotRejectCompletionParams } from 'wasm-lib/kcl/bindings/CopilotRejectCompletionParams'
import { UpdateUnitsParams } from 'wasm-lib/kcl/bindings/UpdateUnitsParams'
import { UpdateCanExecuteParams } from 'wasm-lib/kcl/bindings/UpdateCanExecuteParams'
import { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'
import { UpdateCanExecuteResponse } from 'wasm-lib/kcl/bindings/UpdateCanExecuteResponse'
// https://microsoft.github.io/language-server-protocol/specifications/specification-current/
@ -21,12 +25,17 @@ interface LSPRequestMap {
LSP.SemanticTokensParams,
LSP.SemanticTokens
]
'textDocument/formatting': [
LSP.DocumentFormattingParams,
LSP.TextEdit[] | null
]
'textDocument/foldingRange': [LSP.FoldingRangeParams, LSP.FoldingRange[]]
'copilot/getCompletions': [
CopilotLspCompletionParams,
CopilotCompletionResponse
]
'copilot/notifyAccepted': [CopilotAcceptCompletionParams, any]
'copilot/notifyRejected': [CopilotRejectCompletionParams, any]
'kcl/updateUnits': [UpdateUnitsParams, UpdateUnitsResponse | null]
'kcl/updateCanExecute': [UpdateCanExecuteParams, UpdateCanExecuteResponse]
}
// Client to server
@ -39,6 +48,8 @@ interface LSPNotifyMap {
'workspace/didCreateFiles': LSP.CreateFilesParams
'workspace/didRenameFiles': LSP.RenameFilesParams
'workspace/didDeleteFiles': LSP.DeleteFilesParams
'copilot/notifyAccepted': CopilotAcceptCompletionParams
'copilot/notifyRejected': CopilotRejectCompletionParams
}
export interface LanguageServerClientOptions {
@ -56,11 +67,11 @@ export interface LanguageServerOptions {
export class LanguageServerClient {
private client: Client
private name: string
readonly name: string
public ready: boolean
private plugins: LanguageServerPlugin[]
readonly plugins: LanguageServerPlugin[]
public initializePromise: Promise<void>
@ -185,6 +196,22 @@ export class LanguageServerClient {
return await this.request('textDocument/hover', params)
}
async textDocumentFormatting(params: LSP.DocumentFormattingParams) {
const serverCapabilities = this.getServerCapabilities()
if (!serverCapabilities.documentFormattingProvider) {
return
}
return await this.request('textDocument/formatting', params)
}
async textDocumentFoldingRange(params: LSP.FoldingRangeParams) {
const serverCapabilities = this.getServerCapabilities()
if (!serverCapabilities.foldingRangeProvider) {
return
}
return await this.request('textDocument/foldingRange', params)
}
async textDocumentCompletion(params: LSP.CompletionParams) {
const serverCapabilities = this.getServerCapabilities()
if (!serverCapabilities.completionProvider) {
@ -227,22 +254,34 @@ export class LanguageServerClient {
async accept(uuid: string) {
const badUids = this.queuedUids.filter((u) => u !== uuid)
this.queuedUids = []
await this.acceptCompletion({ uuid })
await this.rejectCompletions({ uuids: badUids })
this.acceptCompletion({ uuid })
this.rejectCompletions({ uuids: badUids })
}
async reject() {
const badUids = this.queuedUids
this.queuedUids = []
return await this.rejectCompletions({ uuids: badUids })
this.rejectCompletions({ uuids: badUids })
}
async acceptCompletion(params: CopilotAcceptCompletionParams) {
return await this.request('copilot/notifyAccepted', params)
acceptCompletion(params: CopilotAcceptCompletionParams) {
this.notify('copilot/notifyAccepted', params)
}
async rejectCompletions(params: CopilotRejectCompletionParams) {
return await this.request('copilot/notifyRejected', params)
rejectCompletions(params: CopilotRejectCompletionParams) {
this.notify('copilot/notifyRejected', params)
}
async updateUnits(
params: UpdateUnitsParams
): Promise<UpdateUnitsResponse | null> {
return await this.request('kcl/updateUnits', params)
}
async updateCanExecute(
params: UpdateCanExecuteParams
): Promise<UpdateCanExecuteResponse> {
return await this.request('kcl/updateCanExecute', params)
}
private processNotifications(notification: LSP.NotificationMessage) {

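Note: a rough usage sketch of the client surface added above. Formatting and folding go through capability-guarded requests, Copilot accept/reject are now fire-and-forget notifications, and the kcl/* extensions cover unit changes and execution gating. The URI and UUIDs are illustrative:

import { LanguageServerClient } from 'editor/plugins/lsp'

async function demo(client: LanguageServerClient) {
  const uri = 'file:///project/main.kcl' // illustrative document URI

  // Formatting: resolves to TextEdit[] | null, or undefined when the server
  // does not advertise documentFormattingProvider.
  const edits = await client.textDocumentFormatting({
    textDocument: { uri },
    options: { tabSize: 2, insertSpaces: true, insertFinalNewline: true },
  })

  // Folding ranges for the whole document.
  const folds = await client.textDocumentFoldingRange({
    textDocument: { uri },
  })

  // Copilot bookkeeping is now a notification, so there is nothing to await.
  client.acceptCompletion({ uuid: 'accepted-completion-uuid' })
  client.rejectCompletions({ uuids: ['rejected-completion-uuid'] })

  // KCL-specific extensions: change base units and gate execution.
  await client.updateUnits({ textDocument: { uri }, text: '', units: 'mm' })
  const canExec = await client.updateCanExecute({ canExecute: false })

  console.log(edits, folds, canExec)
}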

@ -1,10 +1,18 @@
import { autocompletion } from '@codemirror/autocomplete'
import { Extension } from '@codemirror/state'
import { ViewPlugin, hoverTooltip, tooltips } from '@codemirror/view'
import { Extension, EditorState, Prec } from '@codemirror/state'
import {
ViewPlugin,
hoverTooltip,
EditorView,
keymap,
KeyBinding,
tooltips,
} from '@codemirror/view'
import { CompletionTriggerKind } from 'vscode-languageserver-protocol'
import { offsetToPos } from 'editor/plugins/lsp/util'
import { LanguageServerOptions } from 'editor/plugins/lsp'
import { syntaxTree } from '@codemirror/language'
import { syntaxTree, indentService, foldService } from '@codemirror/language'
import { linter, forEachDiagnostic, Diagnostic } from '@codemirror/lint'
import {
LanguageServerPlugin,
documentUri,
@ -12,21 +20,71 @@ import {
workspaceFolders,
} from 'editor/plugins/lsp/plugin'
export const kclIndentService = () => {
// Match the indentation of the previous line (if present).
return indentService.of((context, pos) => {
try {
const previousLine = context.lineAt(pos, -1)
const previousLineText = previousLine.text.replaceAll(
'\t',
' '.repeat(context.state.tabSize)
)
const match = previousLineText.match(/^(\s)*/)
if (match === null || match.length <= 0) return null
return match[0].length
} catch (err) {
console.error('Error in codemirror indentService', err)
}
return null
})
}
export function kclPlugin(options: LanguageServerOptions): Extension {
let plugin: LanguageServerPlugin | null = null
return [
documentUri.of(options.documentUri),
languageId.of('kcl'),
workspaceFolders.of(options.workspaceFolders),
ViewPlugin.define(
const viewPlugin = ViewPlugin.define(
(view) =>
(plugin = new LanguageServerPlugin(
options.client,
view,
options.allowHTMLContent
))
),
)
const kclKeymap: readonly KeyBinding[] = [
{
key: 'Alt-Shift-f',
run: (view: EditorView) => {
if (view.plugin === null) return false
// Get the current plugin from the map.
const p = view.plugin(viewPlugin)
if (p === null) return false
p.requestFormatting()
return true
},
},
]
// Create an extension for the key mappings.
const kclKeymapExt = Prec.highest(keymap.computeN([], () => [kclKeymap]))
const folding = foldService.of(
(state: EditorState, lineStart: number, lineEnd: number) => {
if (plugin == null) return null
// Get the folding ranges from the language server.
// Since this is async we directly need to update the folding ranges after.
return plugin?.foldingRange(lineStart, lineEnd)
}
)
return [
documentUri.of(options.documentUri),
languageId.of('kcl'),
workspaceFolders.of(options.workspaceFolders),
viewPlugin,
kclKeymapExt,
kclIndentService(),
hoverTooltip(
(view, pos) =>
plugin?.requestHoverTooltip(view, offsetToPos(view.state.doc, pos)) ??
@ -35,6 +93,17 @@ export function kclPlugin(options: LanguageServerOptions): Extension {
tooltips({
position: 'absolute',
}),
linter((view) => {
let diagnostics: Diagnostic[] = []
forEachDiagnostic(
view.state,
(d: Diagnostic, from: number, to: number) => {
diagnostics.push(d)
}
)
return diagnostics
}),
folding,
autocompletion({
defaultKeymap: true,
override: [

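Note: kclPlugin now bundles the view plugin, format keymap, indentation service, folding service, lint collection, and autocompletion into a single Extension. A minimal sketch of dropping it into an editor, assuming the import path and a LanguageServerClient constructed as in LspProvider above; the URIs are illustrative:

import { EditorState } from '@codemirror/state'
import { EditorView } from '@codemirror/view'
import { kclPlugin } from 'editor/plugins/lsp/kcl' // assumed import path
import { LanguageServerClient } from 'editor/plugins/lsp'

function makeKclEditor(client: LanguageServerClient, parent: HTMLElement) {
  const state = EditorState.create({
    doc: '',
    extensions: [
      kclPlugin({
        documentUri: 'file:///project/main.kcl',
        workspaceFolders: [{ uri: 'file:///project', name: 'project' }],
        allowHTMLContent: true,
        client,
      }),
    ],
  })
  return new EditorView({ state, parent })
}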

@ -19,14 +19,23 @@ export interface LanguageOptions {
client: LanguageServerClient
}
export default function kclLanguage(options: LanguageOptions): LanguageSupport {
class KclLanguage extends Language {
constructor(options: LanguageOptions) {
const plugin = kclPlugin({
documentUri: options.documentUri,
workspaceFolders: options.workspaceFolders,
allowHTMLContent: true,
client: options.client,
})
super(
data,
// For now let's use the javascript parser.
// It works really well and has good syntax highlighting.
// We can use our lsp for the rest.
const lang = new Language(
data,
jsParser,
[
plugin,
EditorState.languageData.of(() => [
{
// https://codemirror.net/docs/ref/#commands.CommentTokens
@ -42,14 +51,11 @@ export default function kclLanguage(options: LanguageOptions): LanguageSupport {
],
'kcl'
)
// Create our supporting extension.
const kclLsp = kclPlugin({
documentUri: options.documentUri,
workspaceFolders: options.workspaceFolders,
allowHTMLContent: true,
client: options.client,
})
return new LanguageSupport(lang, [kclLsp])
}
}
export default function kclLanguage(options: LanguageOptions): LanguageSupport {
const lang = new KclLanguage(options)
return new LanguageSupport(lang)
}

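Note: kclLanguage now returns a LanguageSupport built around the KclLanguage subclass, so callers no longer wire up the plugin themselves. A short usage sketch, with an assumed import path and illustrative URIs:

import kclLanguage from 'editor/plugins/lsp/kcl/language' // assumed import path
import { LanguageServerClient } from 'editor/plugins/lsp'
import { EditorState } from '@codemirror/state'

function kclEditorState(client: LanguageServerClient) {
  const support = kclLanguage({
    documentUri: 'file:///project/main.kcl',
    workspaceFolders: [{ uri: 'file:///project', name: 'project' }],
    client,
  })
  // LanguageSupport is itself an Extension, so it can go straight into the
  // editor's extension list.
  return EditorState.create({ doc: '', extensions: [support] })
}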

@ -1,4 +1,8 @@
import { completeFromList, snippetCompletion } from '@codemirror/autocomplete'
import {
completeFromList,
hasNextSnippetField,
snippetCompletion,
} from '@codemirror/autocomplete'
import { setDiagnostics } from '@codemirror/lint'
import { Facet } from '@codemirror/state'
import { EditorView, Tooltip } from '@codemirror/view'
@ -7,8 +11,8 @@ import {
CompletionItemKind,
CompletionTriggerKind,
} from 'vscode-languageserver-protocol'
import debounce from 'debounce-promise'
import { deferExecution } from 'lib/utils'
import type {
Completion,
CompletionContext,
@ -20,6 +24,12 @@ import type * as LSP from 'vscode-languageserver-protocol'
import { LanguageServerClient } from 'editor/plugins/lsp'
import { Marked } from '@ts-stack/markdown'
import { posToOffset } from 'editor/plugins/lsp/util'
import { Program, ProgramMemory } from 'lang/wasm'
import { kclManager } from 'lib/singletons'
import type { UnitLength } from 'wasm-lib/kcl/bindings/UnitLength'
import { lspDiagnosticsToKclErrors } from 'lang/errors'
import { UpdateUnitsResponse } from 'wasm-lib/kcl/bindings/UpdateUnitsResponse'
import { UpdateCanExecuteResponse } from 'wasm-lib/kcl/bindings/UpdateCanExecuteResponse'
const useLast = (values: readonly any[]) => values.reduce((_, v) => v, '')
export const documentUri = Facet.define<string, string>({ combine: useLast })
@ -29,18 +39,32 @@ export const workspaceFolders = Facet.define<
LSP.WorkspaceFolder[]
>({ combine: useLast })
const changesDelay = 500
const CompletionItemKindMap = Object.fromEntries(
Object.entries(CompletionItemKind).map(([key, value]) => [value, key])
) as Record<CompletionItemKind, string>
const changesDelay = 600
export class LanguageServerPlugin implements PluginValue {
public client: LanguageServerClient
public documentUri: string
public languageId: string
public workspaceFolders: LSP.WorkspaceFolder[]
private documentVersion: number
private foldingRanges: LSP.FoldingRange[] | null = null
private _defferer = deferExecution((code: string) => {
try {
this.client.textDocumentDidChange({
textDocument: {
uri: this.documentUri,
version: this.documentVersion++,
},
contentChanges: [{ text: code }],
})
} catch (e) {
console.error(e)
}
}, changesDelay)
constructor(
client: LanguageServerClient,
@ -60,9 +84,19 @@ export class LanguageServerPlugin implements PluginValue {
})
}
update({ docChanged }: ViewUpdate) {
update({ docChanged, state }: ViewUpdate) {
if (!docChanged) return
// If we are just fucking around in a snippet, return early and don't
// trigger stuff below that might cause the component to re-render.
// Otherwise we will not be able to tab thru the snippet portions.
// We explicitly dont check HasPrevSnippetField because we always add
// a ${} to the end of the function so that's fine.
// We only care about this for the 'kcl' plugin.
if (this.client.name === 'kcl' && hasNextSnippetField(state)) {
return
}
this.sendChange({
documentText: this.view.state.doc.toString(),
})
@ -100,23 +134,7 @@ export class LanguageServerPlugin implements PluginValue {
documentText = ''
}
try {
debounce(
() => {
return this.client.textDocumentDidChange({
textDocument: {
uri: this.documentUri,
version: this.documentVersion++,
},
contentChanges: [{ text: documentText }],
})
},
changesDelay,
{ leading: true }
)
} catch (e) {
console.error(e)
}
this._defferer(documentText)
}
requestDiagnostics(view: EditorView) {
@ -154,6 +172,126 @@ export class LanguageServerPlugin implements PluginValue {
return { pos, end, create: (view) => ({ dom }), above: true }
}
async getFoldingRanges(): Promise<LSP.FoldingRange[] | null> {
if (
!this.client.ready ||
!this.client.getServerCapabilities().foldingRangeProvider
)
return null
const result = await this.client.textDocumentFoldingRange({
textDocument: { uri: this.documentUri },
})
return result || null
}
async updateFoldingRanges() {
const foldingRanges = await this.getFoldingRanges()
if (foldingRanges === null) return
// Update the folding ranges.
this.foldingRanges = foldingRanges
}
// In the future if codemirrors foldService accepts async folding ranges
// then we will not have to store these and we can call getFoldingRanges
// here.
foldingRange(
lineStart: number,
lineEnd: number
): { from: number; to: number } | null {
if (this.foldingRanges === null) {
return null
}
for (let i = 0; i < this.foldingRanges.length; i++) {
const { startLine, endLine } = this.foldingRanges[i]
if (startLine === lineEnd) {
const range = {
// Set the fold start to the end of the first line
// With this, the fold will not include the first line
from: startLine,
to: endLine,
}
return range
}
}
return null
}
async updateUnits(units: UnitLength): Promise<UpdateUnitsResponse | null> {
if (this.client.name !== 'kcl') return null
if (!this.client.ready) return null
return await this.client.updateUnits({
textDocument: {
uri: this.documentUri,
},
text: this.view.state.doc.toString(),
units,
})
}
async updateCanExecute(
canExecute: boolean
): Promise<UpdateCanExecuteResponse | null> {
if (this.client.name !== 'kcl') return null
if (!this.client.ready) return null
let response = await this.client.updateCanExecute({
canExecute,
})
if (!canExecute && response.isExecuting) {
// We want to wait until the server is not busy before we reply to the
// caller.
while (response.isExecuting) {
await new Promise((resolve) => setTimeout(resolve, 100))
response = await this.client.updateCanExecute({
canExecute,
})
}
}
console.log('[lsp] kcl: updated canExecute', canExecute, response)
return response
}
async requestFormatting() {
if (
!this.client.ready ||
!this.client.getServerCapabilities().documentFormattingProvider
)
return null
this.sendChange({
documentText: this.view.state.doc.toString(),
})
const result = await this.client.textDocumentFormatting({
textDocument: { uri: this.documentUri },
options: {
tabSize: 2,
insertSpaces: true,
insertFinalNewline: true,
},
})
if (!result) return null
for (let i = 0; i < result.length; i++) {
const { range, newText } = result[i]
this.view.dispatch({
changes: [
{
from: posToOffset(this.view.state.doc, range.start)!,
to: posToOffset(this.view.state.doc, range.end)!,
insert: newText,
},
],
})
}
}
async requestCompletion(
context: CompletionContext,
{ line, character }: { line: number; character: number },
@ -239,9 +377,13 @@ export class LanguageServerPlugin implements PluginValue {
try {
switch (notification.method) {
case 'textDocument/publishDiagnostics':
this.processDiagnostics(
notification.params as PublishDiagnosticsParams
)
const params = notification.params as PublishDiagnosticsParams
this.processDiagnostics(params)
// Update the kcl errors pane.
/*kclManager.kclErrors = lspDiagnosticsToKclErrors(
this.view.state.doc,
params.diagnostics
)*/
break
case 'window/logMessage':
console.log(
@ -257,6 +399,23 @@ export class LanguageServerPlugin implements PluginValue {
notification.params
)
break
case 'kcl/astUpdated':
// The server has updated the AST, we should update elsewhere.
let updatedAst = notification.params as Program
console.log('[lsp]: Updated AST', updatedAst)
kclManager.ast = updatedAst
// Update the folding ranges, since the AST has changed.
// This is a hack since codemirror does not support async foldService.
// When they do we can delete this.
this.updateFoldingRanges()
break
case 'kcl/memoryUpdated':
// The server has updated the memory, we should update elsewhere.
let updatedMemory = notification.params as ProgramMemory
console.log('[lsp]: Updated Memory', updatedMemory)
kclManager.programMemory = updatedMemory
break
}
} catch (error) {
console.error(error)

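Note: the plugin replaces the per-call debounce with a single deferExecution wrapper created once, so rapid edits collapse into one textDocument/didChange after the 600 ms delay. For intuition, a plausible trailing-edge implementation with the same call signature is sketched below; this is an assumption, not the actual lib/utils code:

// Assumed sketch of deferExecution(cb, delayMs): returns a function that
// restarts a timer on every call and invokes cb with the latest argument
// once the calls go quiet.
function deferExecution<T>(cb: (arg: T) => void, delayMs: number) {
  let timer: ReturnType<typeof setTimeout> | null = null
  return (arg: T) => {
    if (timer !== null) clearTimeout(timer)
    timer = setTimeout(() => {
      timer = null
      cb(arg) // the most recent argument wins
    }, delayMs)
  }
}

// Usage mirroring the plugin: coalesce rapid edits into one didChange.
const sendDocText = deferExecution((text: string) => {
  console.log('would send textDocument/didChange with', text.length, 'chars')
}, 600)

sendDocText('fir')
sendDocText('first') // only this call's text is sent, roughly 600 ms later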

@ -2,6 +2,7 @@ import { InitOutput, ServerConfig } from 'wasm-lib/pkg/wasm_lib'
import { FromServer, IntoServer } from './codec'
import { fileSystemManager } from 'lang/std/fileSystemManager'
import { copilotLspRun, initPromise, kclLspRun } from 'lang/wasm'
import { engineCommandManager } from 'lib/singletons'
export default class Server {
readonly initOutput: InitOutput
@ -41,7 +42,7 @@ export default class Server {
if (type_ === 'copilot') {
await copilotLspRun(config, token)
} else if (type_ === 'kcl') {
await kclLspRun(config, token || '')
await kclLspRun(config, engineCommandManager, token || '')
}
}
}


@ -1,9 +1,9 @@
import { useLayoutEffect, useEffect, useRef } from 'react'
import { parse } from '../lang/wasm'
import { useStore } from '../useStore'
import { engineCommandManager, kclManager } from 'lib/singletons'
import { deferExecution } from 'lib/utils'
import { Themes } from 'lib/theme'
import { makeDefaultPlanes, parse } from 'lang/wasm'
export function useSetupEngineManager(
streamRef: React.RefObject<HTMLDivElement>,
@ -46,6 +46,9 @@ export function useSetupEngineManager(
},
token,
theme,
makeDefaultPlanes: () => {
return makeDefaultPlanes(kclManager.engineCommandManager)
},
})
setStreamDimensions({
streamWidth: quadWidth,

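Note: EngineCommandManager.start now takes a makeDefaultPlanes factory, and this hook supplies it from the wasm layer. The factory on its own looks like this, with types as declared in the engineConnection changes below:

import { makeDefaultPlanes } from 'lang/wasm'
import { kclManager } from 'lib/singletons'
import type { DefaultPlanes } from 'wasm-lib/kcl/bindings/DefaultPlanes'

// Handed to EngineCommandManager.start(); the wasm layer now issues the
// plane-creation engine commands itself instead of the manager doing it.
const defaultPlanesFactory: () => Promise<DefaultPlanes> = () =>
  makeDefaultPlanes(kclManager.engineCommandManager)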

@ -1,4 +1,4 @@
import { kclErrToDiagnostic, KCLError } from './errors'
import { kclErrorsToDiagnostics, KCLError } from './errors'
describe('test kclErrToDiagnostic', () => {
it('converts KCL errors to CodeMirror diagnostics', () => {
@ -20,7 +20,7 @@ describe('test kclErrToDiagnostic', () => {
],
},
]
const diagnostics = kclErrToDiagnostic(errors)
const diagnostics = kclErrorsToDiagnostics(errors)
expect(diagnostics).toEqual([
{
from: 0,


@ -1,5 +1,8 @@
import { Diagnostic } from '@codemirror/lint'
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
import { Diagnostic as CodeMirrorDiagnostic } from '@codemirror/lint'
import { posToOffset } from 'editor/plugins/lsp/util'
import { Diagnostic as LspDiagnostic } from 'vscode-languageserver-protocol'
import { Text } from '@codemirror/state'
type ExtractKind<T> = T extends { kind: infer K } ? K : never
export class KCLError {
@ -81,11 +84,47 @@ export class KCLUndefinedValueError extends KCLError {
}
}
/**
* Maps the lsp diagnostic to an array of KclErrors.
* Currently the diagnostics are all errors, but in the future they could include lints.
* */
export function lspDiagnosticsToKclErrors(
doc: Text,
diagnostics: LspDiagnostic[]
): KCLError[] {
return diagnostics
.flatMap(
({ range, message }) =>
new KCLError('unexpected', message, [
[posToOffset(doc, range.start)!, posToOffset(doc, range.end)!],
])
)
.filter(({ sourceRanges }) => {
const [from, to] = sourceRanges[0]
return (
from !== null && to !== null && from !== undefined && to !== undefined
)
})
.sort((a, b) => {
const c = a.sourceRanges[0][0]
const d = b.sourceRanges[0][0]
switch (true) {
case c < d:
return -1
case c > d:
return 1
}
return 0
})
}
/**
* Maps the KCL errors to an array of CodeMirror diagnostics.
* Currently the diagnostics are all errors, but in the future they could include lints.
* */
export function kclErrToDiagnostic(errors: KCLError[]): Diagnostic[] {
export function kclErrorsToDiagnostics(
errors: KCLError[]
): CodeMirrorDiagnostic[] {
return errors?.flatMap((err) => {
return err.sourceRanges.map(([from, to]) => {
return { from, to, message: err.msg, severity: 'error' }

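Note: a small round-trip sketch of the two converters above, LSP diagnostics into KCLErrors and KCLErrors into CodeMirror diagnostics; the document text and diagnostic are illustrative:

import { Text } from '@codemirror/state'
import type { Diagnostic as LspDiagnostic } from 'vscode-languageserver-protocol'
import { lspDiagnosticsToKclErrors, kclErrorsToDiagnostics } from 'lang/errors'

const doc = Text.of(['const sketch = startSketchOn("XY")'])

// An illustrative LSP diagnostic covering the first five characters.
const lspDiags: LspDiagnostic[] = [
  {
    range: {
      start: { line: 0, character: 0 },
      end: { line: 0, character: 5 },
    },
    message: 'example message',
  },
]

// LSP positions become KCLError source ranges (offsets into the document)...
const kclErrors = lspDiagnosticsToKclErrors(doc, lspDiags)
// ...and KCLErrors become CodeMirror diagnostics for the lint gutter.
const cmDiags = kclErrorsToDiagnostics(kclErrors)
console.log(kclErrors[0]?.sourceRanges, cmDiags)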

@ -5,6 +5,7 @@ import { exportSave } from 'lib/exportSave'
import { uuidv4 } from 'lib/utils'
import { getNodePathFromSourceRange } from 'lang/queryAst'
import { Themes, getThemeColorForEngine } from 'lib/theme'
import { DefaultPlanes } from 'wasm-lib/kcl/bindings/DefaultPlanes'
let lastMessage = ''
@ -816,7 +817,6 @@ failed cmd type was ${artifactThatFailed?.commandType}`
this.webrtcStatsCollector = undefined
}
finalizeIfAllConnectionsClosed() {
console.log(this.websocket, this.pc, this.unreliableDataChannel)
const allClosed =
this.websocket?.readyState === 3 &&
this.pc?.connectionState === 'closed' &&
@ -875,8 +875,8 @@ export class EngineCommandManager {
outSequence = 1
inSequence = 1
engineConnection?: EngineConnection
defaultPlanes: { xy: string; yz: string; xz: string } | null = null
_commandLogs: CommandLog[] = []
defaultPlanes: DefaultPlanes | null = null
commandLogs: CommandLog[] = []
_commandLogCallBack: (command: CommandLog[]) => void = () => {}
// Folks should realize that wait for ready does not get called _everytime_
// the connection resets and restarts, it only gets called the first time.
@ -914,6 +914,7 @@ export class EngineCommandManager {
set getAstCb(cb: () => Program) {
this.getAst = cb
}
private makeDefaultPlanes: () => Promise<DefaultPlanes> | null = () => null
start({
setMediaStream,
@ -922,6 +923,7 @@ export class EngineCommandManager {
height,
executeCode,
token,
makeDefaultPlanes,
theme = Themes.Dark,
}: {
setMediaStream: (stream: MediaStream) => void
@ -930,8 +932,10 @@ export class EngineCommandManager {
height: number
executeCode: (code?: string, force?: boolean) => void
token?: string
makeDefaultPlanes: () => Promise<DefaultPlanes>
theme?: Themes
}) {
this.makeDefaultPlanes = makeDefaultPlanes
if (width === 0 || height === 0) {
return
}
@ -1279,10 +1283,10 @@ export class EngineCommandManager {
tearDown() {
this.engineConnection?.tearDown()
}
startNewSession() {
async startNewSession() {
this.lastArtifactMap = this.artifactMap
this.artifactMap = {}
this.initPlanes()
await this.initPlanes()
}
subscribeTo<T extends ModelTypes>({
event,
@ -1326,6 +1330,16 @@ export class EngineCommandManager {
onConnectionStateChange(callback: (state: EngineConnectionState) => void) {
this.callbacksEngineStateConnection.push(callback)
}
// We make this a separate function so we can call it from wasm.
clearDefaultPlanes() {
this.defaultPlanes = null
}
async wasmGetDefaultPlanes(): Promise<string> {
if (this.defaultPlanes === null) {
await this.initPlanes()
}
return JSON.stringify(this.defaultPlanes)
}
endSession() {
const deleteCmd: EngineCommand = {
type: 'modeling_cmd_req',
@ -1334,20 +1348,20 @@ export class EngineCommandManager {
type: 'scene_clear_all',
},
}
this.defaultPlanes = null
this.clearDefaultPlanes()
this.engineConnection?.send(deleteCmd)
}
addCommandLog(message: CommandLog) {
if (this._commandLogs.length > 500) {
this._commandLogs.shift()
if (this.commandLogs.length > 500) {
this.commandLogs.shift()
}
this._commandLogs.push(message)
this.commandLogs.push(message)
this._commandLogCallBack([...this._commandLogs])
this._commandLogCallBack([...this.commandLogs])
}
clearCommandLogs() {
this._commandLogs = []
this._commandLogCallBack(this._commandLogs)
this.commandLogs = []
this._commandLogCallBack(this.commandLogs)
}
registerCommandLogCallback(callback: (command: CommandLog[]) => void) {
this._commandLogCallBack = callback
@ -1650,30 +1664,15 @@ export class EngineCommandManager {
}
private async initPlanes() {
if (this.planesInitialized()) return
const [xy, yz, xz] = [
await this.createPlane({
x_axis: { x: 1, y: 0, z: 0 },
y_axis: { x: 0, y: 1, z: 0 },
color: { r: 0.7, g: 0.28, b: 0.28, a: 0.4 },
}),
await this.createPlane({
x_axis: { x: 0, y: 1, z: 0 },
y_axis: { x: 0, y: 0, z: 1 },
color: { r: 0.28, g: 0.7, b: 0.28, a: 0.4 },
}),
await this.createPlane({
x_axis: { x: 1, y: 0, z: 0 },
y_axis: { x: 0, y: 0, z: 1 },
color: { r: 0.28, g: 0.28, b: 0.7, a: 0.4 },
}),
]
this.defaultPlanes = { xy, yz, xz }
const planes = await this.makeDefaultPlanes()
this.defaultPlanes = planes
this.subscribeTo({
event: 'select_with_point',
callback: ({ data }) => {
if (!data?.entity_id) return
if (![xy, yz, xz].includes(data.entity_id)) return
if (!planes) return
if (![planes.xy, planes.yz, planes.xz].includes(data.entity_id)) return
this.onPlaneSelectCallback(data.entity_id)
},
})
@ -1703,40 +1702,4 @@ export class EngineCommandManager {
},
})
}
private async createPlane({
x_axis,
y_axis,
color,
}: {
x_axis: Models['Point3d_type']
y_axis: Models['Point3d_type']
color: Models['Color_type']
}): Promise<string> {
const planeId = uuidv4()
await this.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'make_plane',
size: 100,
origin: { x: 0, y: 0, z: 0 },
x_axis,
y_axis,
clobber: false,
hide: true,
},
cmd_id: planeId,
})
await this.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'plane_set_color',
plane_id: planeId,
color,
},
cmd_id: uuidv4(),
})
await this.setPlaneHidden(planeId, true)
return planeId
}
}

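Note: plane creation is now delegated to the injected factory, and the wasm side pulls the result as JSON through wasmGetDefaultPlanes. A condensed stand-in for that lazy-cache slice of the manager (not the full class):

import type { DefaultPlanes } from 'wasm-lib/kcl/bindings/DefaultPlanes'

// Minimal stand-in for the relevant slice of EngineCommandManager.
class PlaneCache {
  private defaultPlanes: DefaultPlanes | null = null

  constructor(private makeDefaultPlanes: () => Promise<DefaultPlanes>) {}

  // Called when the scene is cleared (endSession) or from wasm.
  clearDefaultPlanes() {
    this.defaultPlanes = null
  }

  // wasm asks for the planes as a JSON string; creation happens on demand.
  async wasmGetDefaultPlanes(): Promise<string> {
    if (this.defaultPlanes === null) {
      this.defaultPlanes = await this.makeDefaultPlanes()
    }
    return JSON.stringify(this.defaultPlanes)
  }
}

The real manager additionally subscribes to select_with_point so that clicks on one of these plane ids reach onPlaneSelectCallback.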

@ -10,6 +10,7 @@ import init, {
ServerConfig,
copilot_lsp_run,
kcl_lsp_run,
make_default_planes,
coredump,
} from '../wasm-lib/pkg/wasm_lib'
import { KCLError } from './errors'
@ -25,6 +26,7 @@ import { DEV } from 'env'
import { AppInfo } from 'wasm-lib/kcl/bindings/AppInfo'
import { CoreDumpManager } from 'lib/coredump'
import openWindow from 'lib/openWindow'
import { DefaultPlanes } from 'wasm-lib/kcl/bindings/DefaultPlanes'
export type { Program } from '../wasm-lib/kcl/bindings/Program'
export type { Value } from '../wasm-lib/kcl/bindings/Value'
@ -194,6 +196,21 @@ export const recast = (ast: Program): string => {
}
}
export const makeDefaultPlanes = async (
engineCommandManager: EngineCommandManager
): Promise<DefaultPlanes> => {
try {
const planes: DefaultPlanes = await make_default_planes(
engineCommandManager
)
return planes
} catch (e) {
// TODO: do something real with the error.
console.log('make default planes error', e)
throw e
}
}
export function lexer(str: string): Token[] {
try {
const tokens: Token[] = lexer_wasm(str)
@ -306,10 +323,16 @@ export async function copilotLspRun(config: ServerConfig, token: string) {
}
}
export async function kclLspRun(config: ServerConfig, token: string) {
export async function kclLspRun(
config: ServerConfig,
engineCommandManager: EngineCommandManager,
token: string
) {
try {
console.log('start kcl lsp')
await kcl_lsp_run(config, token, DEV)
const baseUnit =
(await getSettingsState)()?.modeling.defaultUnit.current || 'mm'
await kcl_lsp_run(config, engineCommandManager, baseUnit, token, DEV)
} catch (e: any) {
console.log('kcl lsp failed', e)
// We can't restart here because a moved value, we should do this another way.


@ -1,7 +1,6 @@
import { type Models } from '@kittycad/lib'
import { Setting, settings } from './initialSettings'
import { AtLeast, PathValue, Paths } from 'lib/types'
import { ChangeEventHandler } from 'react'
import { CommandArgumentConfig } from 'lib/commandTypes'
export enum UnitSystem {


@ -5,9 +5,20 @@ import {
} from '../lang/std/engineConnection'
import { Models } from '@kittycad/lib'
import { Themes } from './theme'
import { v4 as uuidv4 } from 'uuid'
import { DefaultPlanes } from 'wasm-lib/kcl/bindings/DefaultPlanes'
type WebSocketResponse = Models['WebSocketResponse_type']
const defaultPlanes: DefaultPlanes = {
xy: uuidv4(),
xz: uuidv4(),
yz: uuidv4(),
negXy: uuidv4(),
negXz: uuidv4(),
negYz: uuidv4(),
}
class MockEngineCommandManager {
// eslint-disable-next-line @typescript-eslint/no-useless-constructor
constructor(mockParams: {
@ -37,6 +48,9 @@ class MockEngineCommandManager {
}
return Promise.resolve(JSON.stringify(response))
}
async wasmGetDefaultPlanes(): Promise<string> {
return JSON.stringify(defaultPlanes)
}
sendModelingCommandFromWasm(
id: string,
rangeStr: string,
@ -86,6 +100,9 @@ export async function executor(
height: 0,
executeCode: () => {},
theme: Themes.Dark,
makeDefaultPlanes: () => {
return new Promise((resolve) => resolve(defaultPlanes))
},
})
await engineCommandManager.waitForReady
engineCommandManager.startNewSession()


@ -1898,6 +1898,7 @@ dependencies = [
"tower-lsp",
"ts-rs",
"twenty-twenty",
"url",
"uuid",
"wasm-bindgen",
"wasm-bindgen-futures",
@ -4338,6 +4339,7 @@ checksum = "fc2cae1fc5d05d47aa24b64f9a4f7cba24cdc9187a2084dd97ac57bef5eccae6"
dependencies = [
"thiserror",
"ts-rs-macros",
"url",
"uuid",
]


@ -15,6 +15,7 @@ gloo-utils = "0.2.0"
kcl-lib = { path = "kcl" }
kittycad = { workspace = true }
serde_json = "1.0.115"
tokio = { version = "1.37.0", features = ["sync"] }
uuid = { version = "1.8.0", features = ["v4", "js", "serde"] }
wasm-bindgen = "0.2.91"
wasm-bindgen-futures = "0.4.42"


@ -166,7 +166,7 @@ fn do_stdlib_inner(
quote! {
let code_blocks = vec![#(#cb),*];
code_blocks.iter().map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -780,7 +780,7 @@ fn generate_code_block_test(
.commands_ws(None, None, None, None, None,None, Some(false))
.await.unwrap();
let tokens = crate::token::lexer(#code_block);
let tokens = crate::token::lexer(#code_block).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;


@ -25,7 +25,7 @@ mod test_examples_show {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nshow");
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -117,7 +117,7 @@ mod test_examples_show {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -263,7 +263,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_show {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -168,7 +168,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,8 @@ mod test_examples_my_func {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nmyFunc");
let tokens =
crate::token::lexer("This is another code block.\nyes sirrr.\nmyFunc").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -117,7 +118,7 @@ mod test_examples_my_func {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmyFunc");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmyFunc").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -263,7 +264,7 @@ impl crate::docs::StdLibFn for MyFunc {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -26,7 +26,8 @@ mod test_examples_import {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nimport");
let tokens =
crate::token::lexer("This is another code block.\nyes sirrr.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -119,7 +120,7 @@ mod test_examples_import {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -265,7 +266,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,8 @@ mod test_examples_line_to {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nlineTo");
let tokens =
crate::token::lexer("This is another code block.\nyes sirrr.\nlineTo").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -117,7 +118,7 @@ mod test_examples_line_to {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nlineTo");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nlineTo").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -271,7 +272,7 @@ impl crate::docs::StdLibFn for LineTo {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_min {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nmin");
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nmin").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -117,7 +117,7 @@ mod test_examples_min {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmin");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmin").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -263,7 +263,7 @@ impl crate::docs::StdLibFn for Min {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_show {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -168,7 +168,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_import {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -168,7 +168,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_import {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -168,7 +168,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_import {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -168,7 +168,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -25,7 +25,7 @@ mod test_examples_show {
.commands_ws(None, None, None, None, None, None, Some(false))
.await
.unwrap();
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow");
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let units = kittycad::types::UnitLength::Mm;
@ -168,7 +168,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb);
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();


@ -19,7 +19,7 @@ use kittycad_execution_plan_traits as ept;
use kittycad_execution_plan_traits::{Address, NumericPrimitive};
use kittycad_modeling_session::Session;
use self::{
use crate::{
binding_scope::{BindingScope, EpBinding, GetFnResult},
error::{CompileError, Error},
kcl_value_group::SingleValue,


@ -9,7 +9,7 @@ use pretty_assertions::assert_eq;
use super::*;
fn must_plan(program: &str) -> (Vec<Instruction>, BindingScope, Address) {
let tokens = kcl_lib::token::lexer(program);
let tokens = kcl_lib::token::lexer(program).unwrap();
let parser = kcl_lib::parser::Parser::new(tokens);
let ast = parser.ast().unwrap();
let mut p = Planner::new();
@ -18,7 +18,7 @@ fn must_plan(program: &str) -> (Vec<Instruction>, BindingScope, Address) {
}
fn should_not_compile(program: &str) -> CompileError {
let tokens = kcl_lib::token::lexer(program);
let tokens = kcl_lib::token::lexer(program).unwrap();
let parser = kcl_lib::parser::Parser::new(tokens);
let ast = parser.ast().unwrap();
let mut p = Planner::new();
@ -392,7 +392,7 @@ async fn computed_object_property() {
let Some(EpBinding::Single(address_of_val)) = scope.get("val") else {
panic!("Unexpected binding for variable 'val': {:?}", scope.get("val"));
};
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mem = crate::execute(ast, &mut None).await.unwrap();
@ -414,7 +414,7 @@ async fn computed_array_in_object() {
let Some(EpBinding::Single(address_of_val)) = scope.get("val") else {
panic!("Unexpected binding for variable 'val': {:?}", scope.get("val"));
};
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mem = crate::execute(ast, &mut None).await.unwrap();
@ -436,7 +436,7 @@ async fn computed_object_in_array() {
let Some(EpBinding::Single(address_of_val)) = scope.get("val") else {
panic!("Unexpected binding for variable 'val': {:?}", scope.get("val"));
};
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mem = crate::execute(ast, &mut None).await.unwrap();
@ -457,7 +457,7 @@ async fn computed_nested_object_property() {
let Some(EpBinding::Single(address_of_val)) = scope.get("val") else {
panic!("Unexpected binding for variable 'val': {:?}", scope.get("val"));
};
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mem = crate::execute(ast, &mut None).await.unwrap();
@ -588,7 +588,7 @@ async fn computed_array_index() {
]
);
// Now let's run the program and check what's actually in the memory afterwards.
let tokens = kcl_lib::token::lexer(program);
let tokens = kcl_lib::token::lexer(program).unwrap();
let parser = kcl_lib::parser::Parser::new(tokens);
let ast = parser.ast().unwrap();
let mem = crate::execute(ast, &mut None).await.unwrap();
@ -1325,7 +1325,7 @@ async fn stdlib_cube_partial() {
|> close(%)
|> extrude(100.0, %)
"#;
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mut client = Some(test_client().await);
@ -1406,7 +1406,7 @@ async fn stdlib_cube_xline_yline() {
"#;
let (_plan, _scope, _last_address) = must_plan(program);
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mut client = Some(test_client().await);
@ -1488,7 +1488,7 @@ async fn stdlib_cube_with_tangential_arc_to() {
"#;
let (_plan, _scope, last_address) = must_plan(program);
assert_eq!(last_address, Address::ZERO + 76);
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mut client = Some(test_client().await);
@ -1818,7 +1818,7 @@ async fn cos_sin_pi() {
let Some(EpBinding::Constant(z)) = scope.get("z") else {
panic!("Unexpected binding for variable 'z': {:?}", scope.get("z"));
};
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program))
let ast = kcl_lib::parser::Parser::new(kcl_lib::token::lexer(program).unwrap())
.ast()
.unwrap();
let mem = crate::execute(ast, &mut None).await.unwrap();


@ -16,7 +16,7 @@ use syn::{parse_macro_input, LitStr};
pub fn parse(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as LitStr);
let kcl_src = input.value();
let tokens = kcl_lib::token::lexer(&kcl_src);
let tokens = kcl_lib::token::lexer(&kcl_src).unwrap();
let ast = kcl_lib::parser::Parser::new(tokens).ast().unwrap();
let ast_struct = ast.bake(&Default::default());
quote!(#ast_struct).into()


@ -37,14 +37,15 @@ serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.115"
sha2 = "0.10.8"
thiserror = "1.0.58"
ts-rs = { version = "7.1.1", features = ["uuid-impl"] }
ts-rs = { version = "7.1.1", features = ["uuid-impl", "url-impl"] }
url = { version = "2.5.0", features = ["serde"] }
uuid = { version = "1.8.0", features = ["v4", "js", "serde"] }
winnow = "0.5.40"
zip = { version = "0.6.6", default-features = false }
[target.'cfg(target_arch = "wasm32")'.dependencies]
js-sys = { version = "0.3.69" }
tokio = { version = "1.37.0", features = ["sync"] }
tokio = { version = "1.37.0", features = ["sync", "time"] }
tower-lsp = { version = "0.20.0", default-features = false, features = ["runtime-agnostic"] }
wasm-bindgen = "0.2.91"
wasm-bindgen-futures = "0.4.42"


@ -14,7 +14,7 @@ pub fn bench_parse(c: &mut Criterion) {
("math", MATH_PROGRAM),
("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
] {
let tokens = kcl_lib::token::lexer(file);
let tokens = kcl_lib::token::lexer(file).unwrap();
c.bench_function(&format!("parse_{name}"), move |b| {
let tok = tokens.clone();
b.iter(move || {
@ -26,7 +26,7 @@ pub fn bench_parse(c: &mut Criterion) {
}
fn lex(program: &str) {
black_box(kcl_lib::token::lexer(program));
black_box(kcl_lib::token::lexer(program).unwrap());
}
criterion_group!(benches, bench_lex, bench_parse);


@ -1,23 +1,23 @@
use iai::black_box;
pub fn parse(program: &str) {
let tokens = kcl_lib::token::lexer(program);
let tokens = kcl_lib::token::lexer(program).unwrap();
let tok = tokens.clone();
let parser = kcl_lib::parser::Parser::new(tok.clone());
black_box(parser.ast().unwrap());
}
fn lex_kitt() {
black_box(kcl_lib::token::lexer(KITT_PROGRAM));
black_box(kcl_lib::token::lexer(KITT_PROGRAM).unwrap());
}
fn lex_pipes() {
black_box(kcl_lib::token::lexer(PIPES_PROGRAM));
black_box(kcl_lib::token::lexer(PIPES_PROGRAM).unwrap());
}
fn lex_cube() {
black_box(kcl_lib::token::lexer(CUBE_PROGRAM));
black_box(kcl_lib::token::lexer(CUBE_PROGRAM).unwrap());
}
fn lex_math() {
black_box(kcl_lib::token::lexer(MATH_PROGRAM));
black_box(kcl_lib::token::lexer(MATH_PROGRAM).unwrap());
}
fn parse_kitt() {

View File

@ -170,7 +170,7 @@ pub async fn modify_ast_for_sketch(
let recasted = program.recast(&FormatOptions::default(), 0);
// Re-parse the ast so we get the correct source ranges.
let tokens = crate::token::lexer(&recasted);
let tokens = crate::token::lexer(&recasted)?;
let parser = crate::parser::Parser::new(tokens);
*program = parser.ast()?;

View File

@ -8,7 +8,9 @@ use parse_display::{Display, FromStr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value as JValue};
use tower_lsp::lsp_types::{CompletionItem, CompletionItemKind, DocumentSymbol, Range as LspRange, SymbolKind};
use tower_lsp::lsp_types::{
CompletionItem, CompletionItemKind, DocumentSymbol, FoldingRange, FoldingRangeKind, Range as LspRange, SymbolKind,
};
pub use crate::ast::types::{literal_value::LiteralValue, none::KclNone};
use crate::{
@ -22,7 +24,7 @@ use crate::{
mod literal_value;
mod none;
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
#[databake(path = kcl_lib::ast::types)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
@ -36,8 +38,8 @@ pub struct Program {
impl Program {
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
let indentation = options.get_indentation(indentation_level);
let result =
self.body
let result = self
.body
.iter()
.map(|statement| match statement.clone() {
BodyItem::ExpressionStatement(expression_statement) => {
@ -45,20 +47,9 @@ impl Program {
.expression
.recast(options, indentation_level, false)
}
BodyItem::VariableDeclaration(variable_declaration) => variable_declaration
.declarations
.iter()
.fold(String::new(), |mut output, declaration| {
let _ = write!(
output,
"{}{} {} = {}",
indentation,
variable_declaration.kind,
declaration.id.name,
declaration.init.recast(options, 0, false)
);
output
}),
BodyItem::VariableDeclaration(variable_declaration) => {
variable_declaration.recast(options, indentation_level)
}
BodyItem::ReturnStatement(return_statement) => {
format!(
"{}return {}",
@ -125,7 +116,7 @@ impl Program {
.to_string();
// Insert a final new line if the user wants it.
if options.insert_final_newline {
if options.insert_final_newline && !result.is_empty() {
format!("{}\n", result)
} else {
result
@ -214,6 +205,29 @@ impl Program {
symbols
}
// Return all the lsp folding ranges in the program.
pub fn get_lsp_folding_ranges(&self) -> Vec<FoldingRange> {
let mut ranges = vec![];
// We only care about the top level things in the program.
for item in &self.body {
match item {
BodyItem::ExpressionStatement(expression_statement) => {
if let Some(folding_range) = expression_statement.expression.get_lsp_folding_range() {
ranges.push(folding_range)
}
}
BodyItem::VariableDeclaration(variable_declaration) => {
if let Some(folding_range) = variable_declaration.get_lsp_folding_range() {
ranges.push(folding_range)
}
}
BodyItem::ReturnStatement(_return_statement) => continue,
}
}
ranges
}
/// Rename the variable declaration at the given position.
pub fn rename_symbol(&mut self, new_name: &str, pos: usize) {
// The position must be within the variable declaration.
@ -468,6 +482,26 @@ impl Value {
}
}
pub fn get_lsp_folding_range(&self) -> Option<FoldingRange> {
let recasted = self.recast(&FormatOptions::default(), 0, false);
// If the code only has one line then we don't need to fold it.
if recasted.lines().count() <= 1 {
return None;
}
// This unwrap is safe because we know that the code has at least one line.
let first_line = recasted.lines().next().unwrap().to_string();
Some(FoldingRange {
start_line: (self.start() + first_line.len()) as u32,
start_character: None,
end_line: self.end() as u32,
end_character: None,
kind: Some(FoldingRangeKind::Region),
collapsed_text: Some(first_line),
})
}
// Get the non-code meta for the value.
pub fn get_non_code_meta(&self) -> Option<&NonCodeMeta> {
match self {
@ -1254,6 +1288,41 @@ impl VariableDeclaration {
}
}
pub fn get_lsp_folding_range(&self) -> Option<FoldingRange> {
let recasted = self.recast(&FormatOptions::default(), 0);
// If the recasted value only has one line, don't fold it.
if recasted.lines().count() <= 1 {
return None;
}
// This unwrap is safe because we know that the code has at least one line.
let first_line = recasted.lines().next().unwrap().to_string();
Some(FoldingRange {
start_line: (self.start() + first_line.len()) as u32,
start_character: None,
end_line: self.end() as u32,
end_character: None,
kind: Some(FoldingRangeKind::Region),
collapsed_text: Some(first_line),
})
}
pub fn recast(&self, options: &FormatOptions, indentation_level: usize) -> String {
let indentation = options.get_indentation(indentation_level);
self.declarations.iter().fold(String::new(), |mut output, declaration| {
let _ = write!(
output,
"{}{} {} = {}",
indentation,
self.kind,
declaration.id.name,
declaration.init.recast(options, indentation_level, false)
);
output
})
}
pub fn replace_value(&mut self, source_range: SourceRange, new_value: Value) {
for declaration in &mut self.declarations {
declaration.init.replace_value(source_range, new_value.clone());
@ -3095,6 +3164,50 @@ mod tests {
assert!(!completions.is_empty());
}
#[test]
fn test_get_lsp_folding_ranges() {
let code = r#"const part001 = startSketchOn('XY')
|> startProfileAt([0.0000000000, 5.0000000000], %)
|> line([0.4900857016, -0.0240763666], %)
startSketchOn('XY')
|> startProfileAt([0.0000000000, 5.0000000000], %)
|> line([0.4900857016, -0.0240763666], %)
const part002 = "part002"
const things = [part001, 0.0]
let blah = 1
const foo = false
let baz = {a: 1, b: "thing"}
fn ghi = (x) => {
return x
}
ghi("things")
"#;
let tokens = crate::token::lexer(code).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let folding_ranges = program.get_lsp_folding_ranges();
assert_eq!(folding_ranges.len(), 3);
assert_eq!(folding_ranges[0].start_line, 35);
assert_eq!(folding_ranges[0].end_line, 134);
assert_eq!(
folding_ranges[0].collapsed_text,
Some("const part001 = startSketchOn('XY')".to_string())
);
assert_eq!(folding_ranges[1].start_line, 155);
assert_eq!(folding_ranges[1].end_line, 254);
assert_eq!(
folding_ranges[1].collapsed_text,
Some("startSketchOn('XY')".to_string())
);
assert_eq!(folding_ranges[2].start_line, 390);
assert_eq!(folding_ranges[2].end_line, 403);
assert_eq!(folding_ranges[2].collapsed_text, Some("fn ghi = (x) => {".to_string()));
}
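// Illustrative sketch (not part of this commit): how the new folding-range
// support might be exercised outside of tests, assuming the same lexer/parser
// entry points used in the test above.
fn folding_ranges_for(src: &str) -> Vec<tower_lsp::lsp_types::FoldingRange> {
    let tokens = crate::token::lexer(src).unwrap();
    let program = crate::parser::Parser::new(tokens).ast().unwrap();
    // One range per multi-line top-level item, per get_lsp_folding_ranges above.
    program.get_lsp_folding_ranges()
}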
#[test]
fn test_get_lsp_symbols() {
let code = r#"const part001 = startSketchOn('XY')
@ -3111,20 +3224,76 @@ fn ghi = (x) => {
return x
}
"#;
let tokens = crate::token::lexer(code);
let tokens = crate::token::lexer(code).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let symbols = program.get_lsp_symbols(code);
assert_eq!(symbols.len(), 7);
}
#[test]
fn test_recast_empty_file() {
let some_program_string = r#""#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
// It's VERY important this comes back with zero new lines.
assert_eq!(recasted, r#""#);
}
#[test]
fn test_recast_empty_file_new_line() {
let some_program_string = r#"
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
// It's VERY important this comes back with zero new lines.
assert_eq!(recasted, r#""#);
}
#[test]
fn test_recast_nested_var_declaration_in_fn_body() {
let some_program_string = r#"fn cube = (pos, scale) => {
const sg = startSketchOn('XY')
|> startProfileAt(pos, %)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
|> close(%)
|> extrude(scale, %)
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"fn cube = (pos, scale) => {
const sg = startSketchOn('XY')
|> startProfileAt(pos, %)
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
|> close(%)
|> extrude(scale, %)
}
"#
);
}
#[test]
fn test_recast_with_bad_indentation() {
let some_program_string = r#"const part001 = startSketchOn('XY')
|> startProfileAt([0.0, 5.0], %)
|> line([0.4900857016, -0.0240763666], %)
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3145,7 +3314,7 @@ fn ghi = (x) => {
|> startProfileAt([0.0, 5.0], %)
|> line([0.4900857016, -0.0240763666], %) // hello world
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3166,7 +3335,7 @@ fn ghi = (x) => {
|> line([0.4900857016, -0.0240763666], %)
// hello world
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3193,7 +3362,7 @@ fn ghi = (x) => {
// this is also a comment
return things
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3219,7 +3388,7 @@ fn ghi = (x) => {
// this is also a comment
const thing = 'foo'
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3242,7 +3411,7 @@ const key = 'c'
// hello
const thing = 'foo'
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3272,7 +3441,7 @@ const thing = 'c'
const foo = 'bar' //
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3300,7 +3469,7 @@ const foo = 'bar' //
// hello
const thing = 'foo'
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3321,7 +3490,7 @@ const thing = 'foo'
/* comment at start */
const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3353,7 +3522,7 @@ const mySk1 = startSketchOn('XY')
|> ry(45, %)
|> rx(45, %)
// one more for good measure"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3391,7 +3560,7 @@ const mySk1 = startSketchOn('XY')
intersectTag: 'seg01'
}, %)
|> line([-0.42, -1.72], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3417,7 +3586,7 @@ const yo = [
" hey oooooo really long long long"
]
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3435,7 +3604,7 @@ const key = 'c'
const things = "things"
// this is also a comment"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3456,7 +3625,7 @@ const things = "things"
// a comment
"
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3482,7 +3651,7 @@ const part001 = startSketchOn('XY')
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3509,7 +3678,7 @@ const part001 = startSketchOn('XY')
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3540,7 +3709,7 @@ fn ghi = (part001) => {
return part001
}
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
program.rename_symbol("mySuperCoolPart", 6);
@ -3570,7 +3739,7 @@ fn ghi = (part001) => {
let some_program_string = r#"fn ghi = (x, y, z) => {
return x
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
program.rename_symbol("newName", 10);
@ -3594,7 +3763,7 @@ fn ghi = (part001) => {
angle_start: 0,
angle_end: 180,
}, %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3627,7 +3796,7 @@ const cylinder = startSketchOn('-XZ')
}, %)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3652,7 +3821,7 @@ const cylinder = startSketchOn('-XZ')
}, %)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3667,7 +3836,7 @@ const cylinder = startSketchOn('-XZ')
|> startProfileAt([0,0], %)
|> xLine(5, %) // lin
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3690,7 +3859,7 @@ const firstExtrude = startSketchOn('XY')
|> close(%)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3729,7 +3898,7 @@ const firstExtrude = startSketchOn('XY')
|> close(%)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3757,7 +3926,7 @@ const firstExtrude = startSketchOn('XY')
#[tokio::test(flavor = "multi_thread")]
async fn test_recast_math_start_negative() {
let some_program_string = r#"const myVar = -5 + 6"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3770,7 +3939,7 @@ const firstExtrude = startSketchOn('XY')
let some_program_string = r#"fn thing = (arg0: number, arg1: string, tag?: string) => {
return arg0
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3794,7 +3963,7 @@ const firstExtrude = startSketchOn('XY')
let some_program_string = r#"fn thing = (arg0: number[], arg1: string[], tag?: string) => {
return arg0
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3818,7 +3987,7 @@ const firstExtrude = startSketchOn('XY')
let some_program_string = r#"fn thing = (arg0: number[], arg1: {thing: number, things: string[], more?: string}, tag?: string) => {
return arg0
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3875,7 +4044,7 @@ const firstExtrude = startSketchOn('XY')
let some_program_string = r#"fn thing = () => {thing: number, things: string[], more?: string} {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3936,7 +4105,7 @@ startSketchOn('XY')
|> line([0, -(5 - thickness)], %)
|> line([0, -(5 - 1)], %)
|> line([0, -(-5 - 1)], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3952,7 +4121,7 @@ const FOS = 2
const sigmaAllow = 8
const width = 20
const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3984,7 +4153,7 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(raw);
let tokens = crate::token::lexer(raw).unwrap();
let literal = crate::parser::parser_impl::unsigned_number_literal
.parse(&tokens)
.unwrap();
@ -4110,7 +4279,7 @@ const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
#[tokio::test(flavor = "multi_thread")]
async fn test_parse_object_bool() {
let some_program_string = r#"some_func({thing: true, other_thing: false})"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();

View File

@ -7,12 +7,13 @@ use anyhow::{anyhow, Result};
use dashmap::DashMap;
use futures::{SinkExt, StreamExt};
use kittycad::types::{OkWebSocketResponseData, WebSocketRequest, WebSocketResponse};
use tokio::sync::{mpsc, oneshot};
use tokio::sync::{mpsc, oneshot, RwLock};
use tokio_tungstenite::tungstenite::Message as WsMsg;
use crate::{
engine::EngineManager,
errors::{KclError, KclErrorDetails},
executor::DefaultPlanes,
};
#[derive(Debug, PartialEq)]
@ -30,6 +31,9 @@ pub struct EngineConnection {
tcp_read_handle: Arc<TcpReadHandle>,
socket_health: Arc<Mutex<SocketHealth>>,
batch: Arc<Mutex<Vec<(WebSocketRequest, crate::executor::SourceRange)>>>,
/// The default planes for the scene.
default_planes: Arc<RwLock<Option<DefaultPlanes>>>,
}
pub struct TcpRead {
@ -169,6 +173,7 @@ impl EngineConnection {
responses,
socket_health,
batch: Arc::new(Mutex::new(Vec::new())),
default_planes: Default::default(),
})
}
}
@ -179,6 +184,28 @@ impl EngineManager for EngineConnection {
self.batch.clone()
}
async fn default_planes(&self, source_range: crate::executor::SourceRange) -> Result<DefaultPlanes, KclError> {
{
let opt = self.default_planes.read().await.as_ref().cloned();
if let Some(planes) = opt {
return Ok(planes);
}
} // drop the read lock
let new_planes = self.new_default_planes(source_range).await?;
*self.default_planes.write().await = Some(new_planes.clone());
Ok(new_planes)
}
async fn clear_scene_post_hook(&self, source_range: crate::executor::SourceRange) -> Result<(), KclError> {
// Remake the default planes, since they would have been removed after the scene was cleared.
let new_planes = self.new_default_planes(source_range).await?;
*self.default_planes.write().await = Some(new_planes);
Ok(())
}
async fn inner_send_modeling_cmd(
&self,
id: uuid::Uuid,
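// Illustrative sketch (not part of this commit) of the caching pattern used by
// default_planes above: check under a short read lock, drop it, then compute and
// store under a write lock. Assumes tokio::sync::RwLock; the generic names are made up.
async fn get_or_init<T: Clone, F: std::future::Future<Output = T>>(
    cache: &tokio::sync::RwLock<Option<T>>,
    init: F,
) -> T {
    {
        if let Some(cached) = cache.read().await.as_ref().cloned() {
            return cached;
        }
    } // the read guard is dropped here, before taking the write lock
    let value = init.await;
    *cache.write().await = Some(value.clone());
    value
}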

View File

@ -6,7 +6,7 @@ use std::sync::{Arc, Mutex};
use anyhow::Result;
use kittycad::types::{OkWebSocketResponseData, WebSocketRequest};
use crate::errors::KclError;
use crate::{errors::KclError, executor::DefaultPlanes};
#[derive(Debug, Clone)]
pub struct EngineConnection {
@ -27,6 +27,14 @@ impl crate::engine::EngineManager for EngineConnection {
self.batch.clone()
}
async fn default_planes(&self, _source_range: crate::executor::SourceRange) -> Result<DefaultPlanes, KclError> {
Ok(DefaultPlanes::default())
}
async fn clear_scene_post_hook(&self, _source_range: crate::executor::SourceRange) -> Result<(), KclError> {
Ok(())
}
async fn inner_send_modeling_cmd(
&self,
_id: uuid::Uuid,

View File

@ -6,7 +6,10 @@ use anyhow::Result;
use kittycad::types::WebSocketRequest;
use wasm_bindgen::prelude::*;
use crate::errors::{KclError, KclErrorDetails};
use crate::{
errors::{KclError, KclErrorDetails},
executor::DefaultPlanes,
};
#[wasm_bindgen(module = "/../../lang/std/engineConnection.ts")]
extern "C" {
@ -21,6 +24,15 @@ extern "C" {
cmdStr: String,
idToRangeStr: String,
) -> Result<js_sys::Promise, js_sys::Error>;
#[wasm_bindgen(method, js_name = wasmGetDefaultPlanes, catch)]
fn get_default_planes(this: &EngineCommandManager) -> Result<js_sys::Promise, js_sys::Error>;
#[wasm_bindgen(method, js_name = clearDefaultPlanes, catch)]
fn clear_default_planes(this: &EngineCommandManager) -> Result<(), js_sys::Error>;
#[wasm_bindgen(method, js_name = startNewSession, catch)]
fn start_new_session(this: &EngineCommandManager) -> Result<js_sys::Promise, js_sys::Error>;
}
#[derive(Debug, Clone)]
@ -48,6 +60,70 @@ impl crate::engine::EngineManager for EngineConnection {
self.batch.clone()
}
async fn default_planes(&self, source_range: crate::executor::SourceRange) -> Result<DefaultPlanes, KclError> {
// Get the default planes.
let promise = self.manager.get_default_planes().map_err(|e| {
KclError::Engine(KclErrorDetails {
message: e.to_string().into(),
source_ranges: vec![source_range],
})
})?;
let value = crate::wasm::JsFuture::from(promise).await.map_err(|e| {
KclError::Engine(KclErrorDetails {
message: format!("Failed to wait for promise from get default planes: {:?}", e),
source_ranges: vec![source_range],
})
})?;
// Parse the value as a string.
let s = value.as_string().ok_or_else(|| {
KclError::Engine(KclErrorDetails {
message: format!(
"Failed to get string from response from get default planes: `{:?}`",
value
),
source_ranges: vec![source_range],
})
})?;
// Deserialize the response.
let default_planes: DefaultPlanes = serde_json::from_str(&s).map_err(|e| {
KclError::Engine(KclErrorDetails {
message: format!("Failed to deserialize default planes: {:?}", e),
source_ranges: vec![source_range],
})
})?;
Ok(default_planes)
}
async fn clear_scene_post_hook(&self, source_range: crate::executor::SourceRange) -> Result<(), KclError> {
self.manager.clear_default_planes().map_err(|e| {
KclError::Engine(KclErrorDetails {
message: e.to_string().into(),
source_ranges: vec![source_range],
})
})?;
// Start a new session.
let promise = self.manager.start_new_session().map_err(|e| {
KclError::Engine(KclErrorDetails {
message: e.to_string().into(),
source_ranges: vec![source_range],
})
})?;
crate::wasm::JsFuture::from(promise).await.map_err(|e| {
KclError::Engine(KclErrorDetails {
message: format!("Failed to wait for promise from start new session: {:?}", e),
source_ranges: vec![source_range],
})
})?;
Ok(())
}
async fn inner_send_modeling_cmd(
&self,
id: uuid::Uuid,

View File

@ -8,17 +8,38 @@ pub mod conn_mock;
#[cfg(feature = "engine")]
pub mod conn_wasm;
use std::sync::{Arc, Mutex};
use std::{
collections::HashMap,
sync::{Arc, Mutex},
};
use kittycad::types::{OkWebSocketResponseData, WebSocketRequest};
use kittycad::types::{Color, ModelingCmd, OkWebSocketResponseData, WebSocketRequest};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::errors::{KclError, KclErrorDetails};
use crate::{
errors::{KclError, KclErrorDetails},
executor::{DefaultPlanes, Point3d},
};
#[async_trait::async_trait]
pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
/// Get the batch of commands to be sent to the engine.
fn batch(&self) -> Arc<Mutex<Vec<(kittycad::types::WebSocketRequest, crate::executor::SourceRange)>>>;
/// Get the default planes.
async fn default_planes(
&self,
_source_range: crate::executor::SourceRange,
) -> Result<DefaultPlanes, crate::errors::KclError>;
/// Helpers to be called after clearing a scene.
/// (These really only apply to wasm for now.)
async fn clear_scene_post_hook(
&self,
source_range: crate::executor::SourceRange,
) -> Result<(), crate::errors::KclError>;
/// Send a modeling command and wait for the response message.
async fn inner_send_modeling_cmd(
&self,
@ -28,6 +49,24 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
id_to_source_range: std::collections::HashMap<uuid::Uuid, crate::executor::SourceRange>,
) -> Result<kittycad::types::OkWebSocketResponseData, crate::errors::KclError>;
async fn clear_scene(&self, source_range: crate::executor::SourceRange) -> Result<(), crate::errors::KclError> {
self.send_modeling_cmd(
uuid::Uuid::new_v4(),
source_range,
kittycad::types::ModelingCmd::SceneClearAll {},
)
.await?;
// Flush the batch queue, so clear is run right away.
// Otherwise the hooks below won't work.
self.flush_batch(source_range).await?;
// Do the after clear scene hook.
self.clear_scene_post_hook(source_range).await?;
Ok(())
}
async fn send_modeling_cmd(
&self,
id: uuid::Uuid,
@ -134,6 +173,149 @@ pub trait EngineManager: std::fmt::Debug + Send + Sync + 'static {
self.inner_send_modeling_cmd(id_final, source_range, final_req, id_to_source_range)
.await
}
async fn make_default_plane(
&self,
x_axis: Point3d,
y_axis: Point3d,
color: Option<Color>,
source_range: crate::executor::SourceRange,
) -> Result<uuid::Uuid, KclError> {
// Create new default planes.
let default_size = 100.0;
let default_origin = Point3d { x: 0.0, y: 0.0, z: 0.0 }.into();
let plane_id = uuid::Uuid::new_v4();
self.send_modeling_cmd(
plane_id,
source_range,
ModelingCmd::MakePlane {
clobber: false,
origin: default_origin,
size: default_size,
x_axis: x_axis.into(),
y_axis: y_axis.into(),
hide: Some(true),
},
)
.await?;
if let Some(color) = color {
// Set the color.
self.send_modeling_cmd(
uuid::Uuid::new_v4(),
source_range,
ModelingCmd::PlaneSetColor { color, plane_id },
)
.await?;
}
Ok(plane_id)
}
async fn new_default_planes(&self, source_range: crate::executor::SourceRange) -> Result<DefaultPlanes, KclError> {
let plane_settings: HashMap<PlaneName, (Point3d, Point3d, Option<Color>)> = HashMap::from([
(
PlaneName::Xy,
(
Point3d { x: 1.0, y: 0.0, z: 0.0 },
Point3d { x: 0.0, y: 1.0, z: 0.0 },
Some(Color {
r: 0.7,
g: 0.28,
b: 0.28,
a: 0.4,
}),
),
),
(
PlaneName::Yz,
(
Point3d { x: 0.0, y: 1.0, z: 0.0 },
Point3d { x: 0.0, y: 0.0, z: 1.0 },
Some(Color {
r: 0.28,
g: 0.7,
b: 0.28,
a: 0.4,
}),
),
),
(
PlaneName::Xz,
(
Point3d {
x: -1.0,
y: 0.0,
z: 0.0,
},
Point3d { x: 0.0, y: 0.0, z: 1.0 },
Some(Color {
r: 0.28,
g: 0.28,
b: 0.7,
a: 0.4,
}),
),
),
(
PlaneName::NegXy,
(
Point3d {
x: -1.0,
y: 0.0,
z: 0.0,
},
Point3d { x: 0.0, y: 1.0, z: 0.0 },
None,
),
),
(
PlaneName::NegYz,
(
Point3d {
x: 0.0,
y: -1.0,
z: 0.0,
},
Point3d { x: 0.0, y: 0.0, z: 1.0 },
None,
),
),
(
PlaneName::NegXz,
(
Point3d {
x: 1.0, // TODO this should be -1.0
y: 0.0,
z: 0.0,
},
Point3d { x: 0.0, y: 0.0, z: 1.0 },
None,
),
),
]);
let mut planes = HashMap::new();
for (name, (x_axis, y_axis, color)) in plane_settings {
planes.insert(
name,
self.make_default_plane(x_axis, y_axis, color, source_range).await?,
);
}
// Flush the batch queue, so these planes are created right away.
self.flush_batch(source_range).await?;
Ok(DefaultPlanes {
xy: planes[&PlaneName::Xy],
neg_xy: planes[&PlaneName::NegXy],
xz: planes[&PlaneName::Xz],
neg_xz: planes[&PlaneName::NegXz],
yz: planes[&PlaneName::Yz],
neg_yz: planes[&PlaneName::NegYz],
})
}
}
pub fn is_cmd_with_return_values(cmd: &kittycad::types::ModelingCmd) -> bool {
@ -190,3 +372,21 @@ pub fn is_cmd_with_return_values(cmd: &kittycad::types::ModelingCmd) -> bool {
true
}
#[derive(Debug, Hash, Eq, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub enum PlaneName {
/// The XY plane.
Xy,
/// The opposite side of the XY plane.
NegXy,
/// The XZ plane.
Xz,
/// The opposite side of the XZ plane.
NegXz,
/// The YZ plane.
Yz,
/// The opposite side of the YZ plane.
NegYz,
}
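// Illustrative sketch (not part of this commit): resolving the cached engine-side
// plane id for a PlaneName, using the DefaultPlanes struct introduced below.
fn plane_id(planes: &crate::executor::DefaultPlanes, name: &PlaneName) -> uuid::Uuid {
    match name {
        PlaneName::Xy => planes.xy,
        PlaneName::NegXy => planes.neg_xy,
        PlaneName::Xz => planes.xz,
        PlaneName::NegXz => planes.neg_xz,
        PlaneName::Yz => planes.yz,
        PlaneName::NegYz => planes.neg_yz,
    }
}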

View File

@ -225,6 +225,18 @@ pub struct Plane {
pub meta: Vec<Metadata>,
}
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct DefaultPlanes {
pub xy: uuid::Uuid,
pub xz: uuid::Uuid,
pub yz: uuid::Uuid,
pub neg_xy: uuid::Uuid,
pub neg_xz: uuid::Uuid,
pub neg_yz: uuid::Uuid,
}
/// A face.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
@ -655,10 +667,15 @@ impl SourceRange {
}
pub fn end_to_lsp_position(&self, code: &str) -> LspPosition {
let lines = code[..self.end()].lines();
if lines.clone().count() == 0 {
return LspPosition { line: 0, character: 0 };
}
// Calculate the line and column of the error from the source range.
// Lines are zero indexed in vscode so we need to subtract 1.
let line = code[..self.end()].lines().count() - 1;
let column = code[..self.end()].lines().last().map(|l| l.len()).unwrap_or_default();
let line = lines.clone().count() - 1;
let column = lines.last().map(|l| l.len()).unwrap_or_default();
LspPosition {
line: line as u32,
@ -972,7 +989,7 @@ impl Default for PipeInfo {
#[derive(Debug, Clone)]
pub struct ExecutorContext {
pub engine: Arc<Box<dyn EngineManager>>,
pub fs: FileManager,
pub fs: Arc<FileManager>,
pub stdlib: Arc<StdLib>,
pub units: kittycad::types::UnitLength,
/// Mock mode is only for the modeling app when they just want to mock engine calls and not
@ -986,7 +1003,7 @@ impl ExecutorContext {
pub async fn new(ws: reqwest::Upgraded, units: kittycad::types::UnitLength) -> Result<Self> {
Ok(Self {
engine: Arc::new(Box::new(crate::engine::conn::EngineConnection::new(ws).await?)),
fs: FileManager::new(),
fs: Arc::new(FileManager::new()),
stdlib: Arc::new(StdLib::new()),
units,
is_mock: false,
@ -1310,12 +1327,12 @@ mod tests {
use crate::ast::types::{Identifier, Parameter};
pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
let tokens = crate::token::lexer(code);
let tokens = crate::token::lexer(code)?;
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast()?;
let ctx = ExecutorContext {
engine: Arc::new(Box::new(crate::engine::conn_mock::EngineConnection::new().await?)),
fs: crate::fs::FileManager::new(),
fs: Arc::new(crate::fs::FileManager::new()),
stdlib: Arc::new(crate::std::StdLib::new()),
units: kittycad::types::UnitLength::Mm,
is_mock: false,

View File

@ -14,6 +14,7 @@ pub mod fs;
pub mod lsp;
pub mod parser;
pub mod std;
pub mod thread;
pub mod token;
#[cfg(target_arch = "wasm32")]
pub mod wasm;

View File

@ -1,57 +1,155 @@
//! A shared backend trait for LSP servers' memory and behavior.
use std::sync::Arc;
use anyhow::Result;
use dashmap::DashMap;
use tokio::sync::RwLock;
use tower_lsp::lsp_types::{
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializedParams, MessageType, RenameFilesParams,
TextDocumentItem, WorkspaceFolder,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializedParams, MessageType,
RenameFilesParams, TextDocumentItem, WorkspaceFolder,
};
use crate::fs::FileSystem;
use crate::{
fs::FileSystem,
lsp::safemap::SafeMap,
thread::{JoinHandle, Thread},
};
#[derive(Clone)]
pub struct InnerHandle(Arc<JoinHandle>);
impl InnerHandle {
pub fn new(handle: JoinHandle) -> Self {
Self(Arc::new(handle))
}
pub fn is_finished(&self) -> bool {
self.0.is_finished()
}
pub fn cancel(&self) {
self.0.abort();
}
}
#[derive(Clone)]
pub struct UpdateHandle(Arc<RwLock<Option<InnerHandle>>>);
impl UpdateHandle {
pub fn new(handle: InnerHandle) -> Self {
Self(Arc::new(RwLock::new(Some(handle))))
}
pub async fn read(&self) -> Option<InnerHandle> {
self.0.read().await.clone()
}
pub async fn write(&self, handle: Option<InnerHandle>) {
*self.0.write().await = handle;
}
}
impl Default for UpdateHandle {
fn default() -> Self {
Self(Arc::new(RwLock::new(None)))
}
}
/// A trait for the backend of the language server.
#[async_trait::async_trait]
pub trait Backend {
pub trait Backend: Clone + Send + Sync
where
Self: 'static,
{
fn client(&self) -> tower_lsp::Client;
fn fs(&self) -> crate::fs::FileManager;
fn fs(&self) -> Arc<crate::fs::FileManager>;
fn workspace_folders(&self) -> Vec<WorkspaceFolder>;
async fn is_initialized(&self) -> bool;
fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
async fn set_is_initialized(&self, is_initialized: bool);
fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
async fn current_handle(&self) -> Option<InnerHandle>;
async fn set_current_handle(&self, handle: Option<InnerHandle>);
async fn workspace_folders(&self) -> Vec<WorkspaceFolder>;
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);
/// Get the current code map.
fn current_code_map(&self) -> DashMap<String, Vec<u8>>;
fn code_map(&self) -> SafeMap<String, Vec<u8>>;
/// Insert a new code map.
fn insert_current_code_map(&self, uri: String, text: Vec<u8>);
async fn insert_code_map(&self, uri: String, text: Vec<u8>);
// Remove from code map.
fn remove_from_code_map(&self, uri: String) -> Option<(String, Vec<u8>)>;
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>>;
/// Clear the current code state.
fn clear_code_state(&self);
async fn clear_code_state(&self);
/// Get the current diagnostics map.
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport>;
/// On change event.
async fn inner_on_change(&self, params: TextDocumentItem);
async fn inner_on_change(&self, params: TextDocumentItem, force: bool);
/// Check if the file has diagnostics.
async fn has_diagnostics(&self, uri: &str) -> bool {
if let Some(tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics)) =
self.current_diagnostics_map().get(uri).await
{
!diagnostics.full_document_diagnostic_report.items.is_empty()
} else {
false
}
}
async fn on_change(&self, params: TextDocumentItem) {
// Check if the document is in the current code map and if it is the same as what we have
// stored.
let filename = params.uri.to_string();
if let Some(current_code) = self.current_code_map().get(&filename) {
if current_code.value() == params.text.as_bytes() {
if let Some(current_code) = self.code_map().get(&filename).await {
if current_code == params.text.as_bytes() && !self.has_diagnostics(&filename).await {
return;
}
}
// Otherwise update the code map and call the inner on change.
self.insert_current_code_map(filename, params.text.as_bytes().to_vec());
self.inner_on_change(params).await;
// Check if we already have a handle running.
if let Some(current_handle) = self.current_handle().await {
self.set_current_handle(None).await;
// Drop that handle to cancel it.
current_handle.cancel();
}
let cloned = self.clone();
let task = JoinHandle::new(async move {
cloned
.insert_code_map(params.uri.to_string(), params.text.as_bytes().to_vec())
.await;
cloned.inner_on_change(params, false).await;
cloned.set_current_handle(None).await;
});
let update_handle = InnerHandle::new(task);
// Set our new handle.
self.set_current_handle(Some(update_handle.clone())).await;
}
async fn wait_on_handle(&self) {
while let Some(handle) = self.current_handle().await {
if !handle.is_finished() {
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
} else {
break;
}
}
self.set_current_handle(None).await;
}
async fn update_from_disk<P: AsRef<std::path::Path> + std::marker::Send>(&self, path: P) -> Result<()> {
@ -66,7 +164,7 @@ pub trait Backend {
.to_str()
.ok_or_else(|| anyhow::anyhow!("could not get name of file: {:?}", file))?
);
self.insert_current_code_map(file_path, contents);
self.insert_code_map(file_path, contents).await;
}
Ok(())
@ -76,6 +174,8 @@ pub trait Backend {
self.client()
.log_message(MessageType::INFO, format!("initialized: {:?}", params))
.await;
self.set_is_initialized(true).await;
}
async fn do_shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
@ -93,6 +193,7 @@ pub trait Backend {
for folder in params.event.added.iter() {
if !self
.workspace_folders()
.await
.iter()
.any(|f| f.uri == folder.uri && f.name == folder.name)
{
@ -106,12 +207,12 @@ pub trait Backend {
!(params.event.removed.is_empty() && params.event.added.is_empty())
};
self.add_workspace_folders(params.event.added.clone());
self.remove_workspace_folders(params.event.removed);
self.add_workspace_folders(params.event.added.clone()).await;
self.remove_workspace_folders(params.event.removed).await;
// Remove the code from the current code map.
// We do this since it means the user is changing projects, so let's refresh the state.
if !self.current_code_map().is_empty() && should_clear {
self.clear_code_state();
if !self.code_map().is_empty().await && should_clear {
self.clear_code_state().await;
}
for added in params.event.added {
// Try to read all the files in the project.
@ -145,7 +246,7 @@ pub trait Backend {
.await;
// Create each file in the code map.
for file in params.files {
self.insert_current_code_map(file.uri.to_string(), Default::default());
self.insert_code_map(file.uri.to_string(), Default::default()).await;
}
}
@ -155,12 +256,12 @@ pub trait Backend {
.await;
// Rename each file in the code map.
for file in params.files {
if let Some((_, value)) = self.remove_from_code_map(file.old_uri) {
if let Some(value) = self.remove_from_code_map(file.old_uri).await {
// Rename the file if it exists.
self.insert_current_code_map(file.new_uri.to_string(), value);
self.insert_code_map(file.new_uri.to_string(), value).await;
} else {
// Otherwise create it.
self.insert_current_code_map(file.new_uri.to_string(), Default::default());
self.insert_code_map(file.new_uri.to_string(), Default::default()).await;
}
}
}
@ -171,7 +272,7 @@ pub trait Backend {
.await;
// Delete each file in the map.
for file in params.files {
self.remove_from_code_map(file.uri.to_string());
self.remove_from_code_map(file.uri.to_string()).await;
}
}
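// Illustrative sketch (not part of this commit): lsp::safemap::SafeMap is used
// above as an async replacement for DashMap. Its real definition is not shown in
// this diff; a minimal stand-in, assuming a tokio RwLock around a HashMap, could
// look like this (method names mirror the call sites above):
pub struct SafeMap<K: Eq + std::hash::Hash, V>(
    std::sync::Arc<tokio::sync::RwLock<std::collections::HashMap<K, V>>>,
);

impl<K: Eq + std::hash::Hash, V> Clone for SafeMap<K, V> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}

impl<K: Eq + std::hash::Hash, V: Clone> SafeMap<K, V> {
    pub async fn insert(&self, key: K, value: V) {
        self.0.write().await.insert(key, value);
    }
    pub async fn get(&self, key: &K) -> Option<V> {
        self.0.read().await.get(key).cloned()
    }
    pub async fn remove(&self, key: &K) -> Option<V> {
        self.0.write().await.remove(key)
    }
    pub async fn clear(&self) {
        self.0.write().await.clear();
    }
    pub async fn is_empty(&self) -> bool {
        self.0.read().await.is_empty()
    }
}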

View File

@ -9,25 +9,28 @@ use std::{
sync::{Arc, RwLock},
};
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
use tower_lsp::{
jsonrpc::{Error, Result},
lsp_types::{
CreateFilesParams, DeleteFilesParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializeResult, InitializedParams,
MessageType, OneOf, RenameFilesParams, ServerCapabilities, TextDocumentItem, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
WorkspaceServerCapabilities,
DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticReport, InitializeParams,
InitializeResult, InitializedParams, MessageType, OneOf, RenameFilesParams, ServerCapabilities,
TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, WorkspaceFolder,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
},
LanguageServer,
};
use self::types::{CopilotAcceptCompletionParams, CopilotCompletionTelemetry, CopilotRejectCompletionParams};
use super::backend::{InnerHandle, UpdateHandle};
use crate::lsp::{
backend::Backend as _,
copilot::types::{CopilotCompletionResponse, CopilotEditorInfo, CopilotLspCompletionParams, DocParams},
copilot::types::{
CopilotAcceptCompletionParams, CopilotCompletionResponse, CopilotCompletionTelemetry, CopilotEditorInfo,
CopilotLspCompletionParams, CopilotRejectCompletionParams, DocParams,
},
safemap::SafeMap,
};
#[derive(Deserialize, Serialize, Debug)]
@ -40,16 +43,16 @@ impl Success {
}
}
#[derive(Debug, Clone)]
#[derive(Clone)]
pub struct Backend {
/// The client is used to send notifications and requests to the client.
pub client: tower_lsp::Client,
/// The file system client to use.
pub fs: crate::fs::FileManager,
pub fs: Arc<crate::fs::FileManager>,
/// The workspace folders.
pub workspace_folders: DashMap<String, WorkspaceFolder>,
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
/// Current code.
pub current_code_map: DashMap<String, Vec<u8>>,
pub code_map: SafeMap<String, Vec<u8>>,
/// The Zoo API client.
pub zoo_client: kittycad::Client,
/// The editor info is used to store information about the editor.
@ -57,7 +60,10 @@ pub struct Backend {
/// The cache is used to store the results of previous requests.
pub cache: Arc<cache::CopilotCache>,
/// Storage so we can send telemetry data back out.
pub telemetry: DashMap<uuid::Uuid, CopilotCompletionTelemetry>,
pub telemetry: SafeMap<uuid::Uuid, CopilotCompletionTelemetry>,
pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
pub current_handle: UpdateHandle,
}
// Implement the shared backend trait for the language server.
@ -67,43 +73,63 @@ impl crate::lsp::backend::Backend for Backend {
self.client.clone()
}
fn fs(&self) -> crate::fs::FileManager {
fn fs(&self) -> Arc<crate::fs::FileManager> {
self.fs.clone()
}
fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
self.workspace_folders.iter().map(|v| v.value().clone()).collect()
async fn is_initialized(&self) -> bool {
*self.is_initialized.read().await
}
fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
async fn set_is_initialized(&self, is_initialized: bool) {
*self.is_initialized.write().await = is_initialized;
}
async fn current_handle(&self) -> Option<InnerHandle> {
self.current_handle.read().await
}
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
self.current_handle.write(handle).await;
}
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
self.workspace_folders.inner().await.values().cloned().collect()
}
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.insert(folder.name.to_string(), folder);
self.workspace_folders.insert(folder.name.to_string(), folder).await;
}
}
fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.remove(&folder.name);
self.workspace_folders.remove(&folder.name).await;
}
}
fn current_code_map(&self) -> DashMap<String, Vec<u8>> {
self.current_code_map.clone()
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
self.code_map.clone()
}
fn insert_current_code_map(&self, uri: String, text: Vec<u8>) {
self.current_code_map.insert(uri, text);
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
self.code_map.insert(uri, text).await;
}
fn remove_from_code_map(&self, uri: String) -> Option<(String, Vec<u8>)> {
self.current_code_map.remove(&uri)
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
self.code_map.remove(&uri).await
}
fn clear_code_state(&self) {
self.current_code_map.clear();
async fn clear_code_state(&self) {
self.code_map.clear().await;
}
async fn inner_on_change(&self, _params: TextDocumentItem) {
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
Default::default()
}
async fn inner_on_change(&self, _params: TextDocumentItem, _force: bool) {
// We don't need to do anything here.
}
}
@ -208,7 +234,7 @@ impl Backend {
completion: completion.clone(),
params: params.clone(),
};
self.telemetry.insert(completion.uuid, telemetry);
self.telemetry.insert(completion.uuid, telemetry).await;
}
self.cache
.set_cached_result(&doc_params.uri, &doc_params.pos.line, &response);
@ -222,7 +248,7 @@ impl Backend {
.await;
// Get the original telemetry data.
let Some((_, original)) = self.telemetry.remove(&params.uuid) else {
let Some(original) = self.telemetry.remove(&params.uuid).await else {
return;
};
@ -241,7 +267,7 @@ impl Backend {
// Get the original telemetry data.
let mut originals: Vec<CopilotCompletionTelemetry> = Default::default();
for uuid in params.uuids {
if let Some((_, original)) = self.telemetry.remove(&uuid) {
if let Some(original) = self.telemetry.remove(&uuid).await {
originals.push(original);
}
}

View File

@ -1,7 +1,7 @@
//! Custom notifications for the KCL LSP server that are not part of the LSP specification.
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::{notification::Notification, TextDocumentIdentifier};
use tower_lsp::lsp_types::notification::Notification;
/// A notification that the AST has changed.
#[derive(Debug)]
@ -21,9 +21,86 @@ impl Notification for MemoryUpdated {
const METHOD: &'static str = "kcl/memoryUpdated";
}
#[derive(Debug, Serialize, Deserialize, Clone)]
/// Text documents are identified using a URI. On the protocol level, URIs are passed as strings.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, ts_rs::TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct TextDocumentIdentifier {
/// The text document's URI.
pub uri: url::Url,
}
/// The valid types of length units.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, ts_rs::TS)]
#[ts(export)]
pub enum UnitLength {
/// Centimeters <https://en.wikipedia.org/wiki/Centimeter>
#[serde(rename = "cm")]
Cm,
/// Feet <https://en.wikipedia.org/wiki/Foot_(unit)>
#[serde(rename = "ft")]
Ft,
/// Inches <https://en.wikipedia.org/wiki/Inch>
#[serde(rename = "in")]
In,
/// Meters <https://en.wikipedia.org/wiki/Meter>
#[serde(rename = "m")]
M,
/// Millimeters <https://en.wikipedia.org/wiki/Millimeter>
#[serde(rename = "mm")]
Mm,
/// Yards <https://en.wikipedia.org/wiki/Yard>
#[serde(rename = "yd")]
Yd,
}
impl From<kittycad::types::UnitLength> for UnitLength {
fn from(unit: kittycad::types::UnitLength) -> Self {
match unit {
kittycad::types::UnitLength::Cm => UnitLength::Cm,
kittycad::types::UnitLength::Ft => UnitLength::Ft,
kittycad::types::UnitLength::In => UnitLength::In,
kittycad::types::UnitLength::M => UnitLength::M,
kittycad::types::UnitLength::Mm => UnitLength::Mm,
kittycad::types::UnitLength::Yd => UnitLength::Yd,
}
}
}
impl From<UnitLength> for kittycad::types::UnitLength {
fn from(unit: UnitLength) -> Self {
match unit {
UnitLength::Cm => kittycad::types::UnitLength::Cm,
UnitLength::Ft => kittycad::types::UnitLength::Ft,
UnitLength::In => kittycad::types::UnitLength::In,
UnitLength::M => kittycad::types::UnitLength::M,
UnitLength::Mm => kittycad::types::UnitLength::Mm,
UnitLength::Yd => kittycad::types::UnitLength::Yd,
}
}
}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, ts_rs::TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct UpdateUnitsParams {
pub text_document: TextDocumentIdentifier,
pub units: kittycad::types::UnitLength,
/// The content of the text document.
pub text: String,
pub units: UnitLength,
}
#[derive(Debug, PartialEq, Clone, Deserialize, Serialize, ts_rs::TS)]
#[ts(export)]
pub struct UpdateUnitsResponse {}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, ts_rs::TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct UpdateCanExecuteParams {
pub can_execute: bool,
}
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize, ts_rs::TS)]
#[ts(export)]
pub struct UpdateCanExecuteResponse {}
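// Illustrative sketch (not part of this commit): constructing an UpdateUnitsParams
// payload for the custom kcl notification; the uri and code below are made up.
fn example_update_units_params() -> UpdateUnitsParams {
    UpdateUnitsParams {
        text_document: TextDocumentIdentifier {
            uri: url::Url::parse("file:///main.kcl").unwrap(),
        },
        text: "const thing = 1".to_string(),
        units: UnitLength::Mm,
    }
}
// With the serde attributes above, serde_json::to_string of this value produces
// camelCase keys and the short unit names, e.g.
// {"textDocument":{"uri":"file:///main.kcl"},"text":"const thing = 1","units":"mm"}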

View File

@ -9,7 +9,6 @@ pub mod custom_notifications;
use anyhow::Result;
#[cfg(feature = "cli")]
use clap::Parser;
use dashmap::DashMap;
use sha2::Digest;
use tower_lsp::{
jsonrpc::Result as RpcResult,
@ -19,22 +18,27 @@ use tower_lsp::{
DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams,
DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, DocumentDiagnosticParams,
DocumentDiagnosticReport, DocumentDiagnosticReportResult, DocumentFilter, DocumentFormattingParams,
DocumentSymbol, DocumentSymbolParams, DocumentSymbolResponse, Documentation, FullDocumentDiagnosticReport,
Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult,
InitializedParams, InlayHint, InlayHintParams, InsertTextFormat, MarkupContent, MarkupKind, MessageType, OneOf,
Position, RelatedFullDocumentDiagnosticReport, RenameFilesParams, RenameParams, SemanticToken,
SemanticTokenType, SemanticTokens, SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions,
SemanticTokensParams, SemanticTokensRegistrationOptions, SemanticTokensResult,
SemanticTokensServerCapabilities, ServerCapabilities, SignatureHelp, SignatureHelpOptions, SignatureHelpParams,
StaticRegistrationOptions, TextDocumentItem, TextDocumentRegistrationOptions, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, TextEdit, WorkDoneProgressOptions, WorkspaceEdit,
WorkspaceFolder, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
DocumentSymbol, DocumentSymbolParams, DocumentSymbolResponse, Documentation, FoldingRange, FoldingRangeParams,
FoldingRangeProviderCapability, FullDocumentDiagnosticReport, Hover, HoverContents, HoverParams,
HoverProviderCapability, InitializeParams, InitializeResult, InitializedParams, InlayHint, InlayHintParams,
InsertTextFormat, MarkupContent, MarkupKind, MessageType, OneOf, Position, RelatedFullDocumentDiagnosticReport,
RenameFilesParams, RenameParams, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensFullOptions,
SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensRegistrationOptions,
SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities, SignatureHelp,
SignatureHelpOptions, SignatureHelpParams, StaticRegistrationOptions, TextDocumentItem,
TextDocumentRegistrationOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
TextEdit, WorkDoneProgressOptions, WorkspaceEdit, WorkspaceFolder, WorkspaceFoldersServerCapabilities,
WorkspaceServerCapabilities,
},
Client, LanguageServer,
};
use super::backend::{InnerHandle, UpdateHandle};
use crate::{
ast::types::VariableKind, errors::KclError, executor::SourceRange, lsp::backend::Backend as _,
ast::types::VariableKind,
errors::KclError,
executor::SourceRange,
lsp::{backend::Backend as _, safemap::SafeMap},
parser::PIPE_OPERATOR,
};
@ -57,9 +61,9 @@ pub struct Backend {
/// The client for the backend.
pub client: Client,
/// The file system client to use.
pub fs: crate::fs::FileManager,
pub fs: Arc<crate::fs::FileManager>,
/// The workspace folders.
pub workspace_folders: DashMap<String, WorkspaceFolder>,
pub workspace_folders: SafeMap<String, WorkspaceFolder>,
/// The stdlib completions for the language.
pub stdlib_completions: HashMap<String, CompletionItem>,
/// The stdlib signatures for the language.
@ -67,25 +71,30 @@ pub struct Backend {
/// The types of tokens the server supports.
pub token_types: Vec<SemanticTokenType>,
/// Token maps.
pub token_map: DashMap<String, Vec<crate::token::Token>>,
pub token_map: SafeMap<String, Vec<crate::token::Token>>,
/// AST maps.
pub ast_map: DashMap<String, crate::ast::types::Program>,
pub ast_map: SafeMap<String, crate::ast::types::Program>,
/// Memory maps.
pub memory_map: DashMap<String, crate::executor::ProgramMemory>,
pub memory_map: SafeMap<String, crate::executor::ProgramMemory>,
/// Current code.
pub current_code_map: DashMap<String, Vec<u8>>,
pub code_map: SafeMap<String, Vec<u8>>,
/// Diagnostics.
pub diagnostics_map: DashMap<String, DocumentDiagnosticReport>,
pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,
/// Symbols map.
pub symbols_map: DashMap<String, Vec<DocumentSymbol>>,
pub symbols_map: SafeMap<String, Vec<DocumentSymbol>>,
/// Semantic tokens map.
pub semantic_tokens_map: DashMap<String, Vec<SemanticToken>>,
pub semantic_tokens_map: SafeMap<String, Vec<SemanticToken>>,
/// The Zoo API client.
pub zoo_client: kittycad::Client,
/// If we can send telemetry for this user.
pub can_send_telemetry: bool,
/// Optional executor context to use if we want to execute the code.
pub executor_ctx: Arc<RwLock<Option<crate::executor::ExecutorContext>>>,
/// If we are currently allowed to execute the ast.
pub can_execute: Arc<RwLock<bool>>,
pub is_initialized: Arc<RwLock<bool>>,
pub current_handle: UpdateHandle,
}
// Implement the shared backend trait for the language server.
@ -95,54 +104,182 @@ impl crate::lsp::backend::Backend for Backend {
self.client.clone()
}
fn fs(&self) -> crate::fs::FileManager {
fn fs(&self) -> Arc<crate::fs::FileManager> {
self.fs.clone()
}
fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
self.workspace_folders.iter().map(|v| v.value().clone()).collect()
async fn is_initialized(&self) -> bool {
*self.is_initialized.read().await
}
fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
async fn set_is_initialized(&self, is_initialized: bool) {
*self.is_initialized.write().await = is_initialized;
}
async fn current_handle(&self) -> Option<InnerHandle> {
self.current_handle.read().await
}
async fn set_current_handle(&self, handle: Option<InnerHandle>) {
self.current_handle.write(handle).await;
}
async fn workspace_folders(&self) -> Vec<WorkspaceFolder> {
self.workspace_folders.inner().await.values().cloned().collect()
}
async fn add_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.insert(folder.name.to_string(), folder);
self.workspace_folders.insert(folder.name.to_string(), folder).await;
}
}
fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>) {
for folder in folders {
self.workspace_folders.remove(&folder.name);
self.workspace_folders.remove(&folder.name).await;
}
}
fn current_code_map(&self) -> DashMap<String, Vec<u8>> {
self.current_code_map.clone()
fn code_map(&self) -> SafeMap<String, Vec<u8>> {
self.code_map.clone()
}
fn insert_current_code_map(&self, uri: String, text: Vec<u8>) {
self.current_code_map.insert(uri, text);
async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
self.code_map.insert(uri, text).await;
}
fn remove_from_code_map(&self, uri: String) -> Option<(String, Vec<u8>)> {
self.current_code_map.remove(&uri)
async fn remove_from_code_map(&self, uri: String) -> Option<Vec<u8>> {
self.code_map.remove(&uri).await
}
fn clear_code_state(&self) {
self.current_code_map.clear();
self.token_map.clear();
self.ast_map.clear();
self.diagnostics_map.clear();
self.symbols_map.clear();
self.semantic_tokens_map.clear();
async fn clear_code_state(&self) {
self.code_map.clear().await;
self.token_map.clear().await;
self.ast_map.clear().await;
self.diagnostics_map.clear().await;
self.symbols_map.clear().await;
self.semantic_tokens_map.clear().await;
}
async fn inner_on_change(&self, params: TextDocumentItem) {
fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
self.diagnostics_map.clone()
}
async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
// We already updated the code map in the shared backend.
// Let's update the tokens.
let tokens = crate::token::lexer(&params.text);
self.token_map.insert(params.uri.to_string(), tokens.clone());
let tokens = match crate::token::lexer(&params.text) {
Ok(tokens) => tokens,
Err(err) => {
self.add_to_diagnostics(&params, err).await;
return;
}
};
// Get the previous tokens.
let previous_tokens = self.token_map.get(&params.uri.to_string()).await;
// Try to get the memory for the current code.
let has_memory = if let Some(memory) = self.memory_map.get(&params.uri.to_string()).await {
memory != crate::executor::ProgramMemory::default()
} else {
false
};
let tokens_changed = if let Some(previous_tokens) = previous_tokens.clone() {
previous_tokens != tokens
} else {
true
};
// Check if the tokens are the same.
if !tokens_changed && !force && has_memory && !self.has_diagnostics(params.uri.as_ref()).await {
// We return early here because the tokens are the same.
return;
}
if tokens_changed {
// Update our token map.
self.token_map.insert(params.uri.to_string(), tokens.clone()).await;
// Update our semantic tokens.
self.update_semantic_tokens(tokens.clone(), &params).await;
}
// Let's update the ast.
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let ast = match result {
Ok(ast) => ast,
Err(err) => {
self.add_to_diagnostics(&params, err).await;
return;
}
};
// Check if the ast changed.
let ast_changed = match self.ast_map.get(&params.uri.to_string()).await {
Some(old_ast) => {
// Check if the ast changed.
old_ast != ast
}
None => true,
};
if !ast_changed && !force && has_memory && !self.has_diagnostics(params.uri.as_ref()).await {
// Return early if the ast did not change and we don't need to force.
return;
}
if ast_changed {
self.ast_map.insert(params.uri.to_string(), ast.clone()).await;
// Update the symbols map.
self.symbols_map
.insert(params.uri.to_string(), ast.get_lsp_symbols(&params.text))
.await;
}
// Send the notification to the client that the ast was updated.
if self.can_execute().await {
// Only send the notification if we can execute.
// Otherwise it confuses the client.
self.client
.send_notification::<custom_notifications::AstUpdated>(ast.clone())
.await;
}
// Execute the code if we have an executor context.
// This function automatically executes if we should, and updates the diagnostics if we got errors.
let result = self.execute(&params, ast).await;
if result.is_err() {
// We return early because we got errors, and we don't want to clear the diagnostics.
return;
}
// Let's update the diagnostics, since we got no errors.
self.clear_diagnostics(&params.uri).await;
}
}
impl Backend {
pub async fn can_execute(&self) -> bool {
*self.can_execute.read().await
}
async fn set_can_execute(&self, can_execute: bool) {
*self.can_execute.write().await = can_execute;
}
pub async fn executor_ctx(&self) -> Option<crate::executor::ExecutorContext> {
self.executor_ctx.read().await.clone()
}
async fn set_executor_ctx(&self, executor_ctx: crate::executor::ExecutorContext) {
*self.executor_ctx.write().await = Some(executor_ctx);
}
async fn update_semantic_tokens(&self, tokens: Vec<crate::token::Token>, params: &TextDocumentItem) {
// Update the semantic tokens map.
let mut semantic_tokens = vec![];
let mut last_position = Position::new(0, 0);
@ -192,70 +329,16 @@ impl crate::lsp::backend::Backend for Backend {
last_position = position;
}
self.semantic_tokens_map.insert(params.uri.to_string(), semantic_tokens);
// Let's update the ast.
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let ast = match result {
Ok(ast) => ast,
Err(err) => {
self.add_to_diagnostics(&params, err).await;
return;
}
};
// Check if the ast changed.
let ast_changed = match self.ast_map.get(&params.uri.to_string()) {
Some(old_ast) => {
// Check if the ast changed.
*old_ast.value() != ast
}
None => true,
};
// If the ast changed update the map and symbols and execute if we need to.
if ast_changed {
// Update the symbols map.
self.symbols_map
.insert(params.uri.to_string(), ast.get_lsp_symbols(&params.text));
self.ast_map.insert(params.uri.to_string(), ast.clone());
// Send the notification to the client that the ast was updated.
self.client
.send_notification::<custom_notifications::AstUpdated>(ast.clone())
self.semantic_tokens_map
.insert(params.uri.to_string(), semantic_tokens)
.await;
// Execute the code if we have an executor context.
// This function automatically executes if we should, and updates the diagnostics if we got errors.
self.execute(&params, ast).await;
}
// Let's update the diagnostics, since we got no errors.
self.diagnostics_map.insert(
params.uri.to_string(),
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: FullDocumentDiagnosticReport {
result_id: None,
items: vec![],
},
}),
);
// Publish the diagnostic, we reset it here so the client knows the code compiles now.
// If the client supports it.
self.client.publish_diagnostics(params.uri.clone(), vec![], None).await;
}
}
impl Backend {
async fn add_to_diagnostics(&self, params: &TextDocumentItem, err: KclError) {
let diagnostic = err.to_lsp_diagnostic(&params.text);
// We got errors, update the diagnostics.
self.diagnostics_map.insert(
self.diagnostics_map
.insert(
params.uri.to_string(),
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
related_documents: None,
@ -264,7 +347,8 @@ impl Backend {
items: vec![diagnostic.clone()],
},
}),
);
)
.await;
// Publish the diagnostic.
// If the client supports it.
@ -273,27 +357,64 @@ impl Backend {
.await;
}
async fn execute(&self, params: &TextDocumentItem, ast: crate::ast::types::Program) {
async fn clear_diagnostics(&self, uri: &url::Url) {
self.diagnostics_map
.insert(
uri.to_string(),
DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: FullDocumentDiagnosticReport {
result_id: None,
items: vec![],
},
}),
)
.await;
// Publish the diagnostic, we reset it here so the client knows the code compiles now.
// If the client supports it.
self.client.publish_diagnostics(uri.clone(), vec![], None).await;
}
async fn execute(&self, params: &TextDocumentItem, ast: crate::ast::types::Program) -> Result<()> {
// Check if we can execute.
if !self.can_execute().await {
return Ok(());
}
// Execute the code if we have an executor context.
let Some(executor_ctx) = self.executor_ctx.read().await.clone() else {
return;
let Some(executor_ctx) = self.executor_ctx().await else {
return Ok(());
};
if !self.is_initialized().await {
// We are not initialized yet.
return Ok(());
}
// Clear the scene before we execute so stale geometry from the previous run doesn't linger.
executor_ctx.engine.clear_scene(SourceRange::default()).await?;
let memory = match executor_ctx.run(ast, None).await {
Ok(memory) => memory,
Err(err) => {
self.add_to_diagnostics(params, err).await;
return;
// Since we already published the diagnostics we don't really care about the error
// string.
return Err(anyhow::anyhow!("failed to execute code"));
}
};
drop(executor_ctx); // Drop the lock here.
drop(executor_ctx);
self.memory_map.insert(params.uri.to_string(), memory.clone()).await;
self.memory_map.insert(params.uri.to_string(), memory.clone());
// Send the notification to the client that the memory was updated.
self.client
.send_notification::<custom_notifications::MemoryUpdated>(memory)
.await;
Ok(())
}
fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<usize> {
@ -303,7 +424,7 @@ impl Backend {
async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
let mut completions = vec![];
let ast = match self.ast_map.get(file_name) {
let ast = match self.ast_map.get(file_name).await {
Some(ast) => ast,
None => return completions,
};
@ -348,16 +469,16 @@ impl Backend {
completions
}
pub fn create_zip(&self) -> Result<Vec<u8>> {
pub async fn create_zip(&self) -> Result<Vec<u8>> {
// Collect all the file data we know.
let mut buf = vec![];
let mut zip = zip::ZipWriter::new(std::io::Cursor::new(&mut buf));
for entry in self.current_code_map.iter() {
let file_name = entry.key().replace("file://", "").to_string();
for (entry, value) in self.code_map.inner().await.iter() {
let file_name = entry.replace("file://", "").to_string();
let options = zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Stored);
zip.start_file(file_name, options)?;
zip.write_all(entry.value())?;
zip.write_all(value)?;
}
// Apply the changes you've made.
// Dropping the `ZipWriter` will have the same effect, but may silently fail
@ -388,7 +509,7 @@ impl Backend {
// Get the workspace folders.
// The key of the workspace folder is the project name.
let workspace_folders = self.workspace_folders();
let workspace_folders = self.workspace_folders().await;
let project_names: Vec<String> = workspace_folders.iter().map(|v| v.name.clone()).collect::<Vec<_>>();
// Get the first name.
let project_name = project_names
@ -405,7 +526,7 @@ impl Backend {
name: "attachment".to_string(),
filename: Some("attachment.zip".to_string()),
content_type: Some("application/x-zip".to_string()),
data: self.create_zip()?,
data: self.create_zip().await?,
}],
&kittycad::types::Event {
// This gets generated server side so leave empty for now.
@ -429,49 +550,89 @@ impl Backend {
Ok(())
}
pub async fn update_units(&self, params: custom_notifications::UpdateUnitsParams) {
pub async fn update_units(
&self,
params: custom_notifications::UpdateUnitsParams,
) -> RpcResult<Option<custom_notifications::UpdateUnitsResponse>> {
let filename = params.text_document.uri.to_string();
{
let Some(mut executor_ctx) = self.executor_ctx.read().await.clone() else {
let Some(mut executor_ctx) = self.executor_ctx().await else {
self.client
.log_message(MessageType::ERROR, "no executor context set to update units for")
.await;
return;
return Ok(None);
};
self.client
.log_message(MessageType::INFO, format!("update units: {:?}", params))
.await;
// Try to get the memory for the current code.
let has_memory = if let Some(memory) = self.memory_map.get(&filename).await {
memory != crate::executor::ProgramMemory::default()
} else {
false
};
let units: kittycad::types::UnitLength = params.units.into();
if executor_ctx.units == units
&& !self.has_diagnostics(params.text_document.uri.as_ref()).await
&& has_memory
{
// Return early the units are the same.
return Ok(None);
}
// Set the engine units.
executor_ctx.update_units(params.units);
executor_ctx.update_units(units);
// Update the locked executor context.
*self.executor_ctx.write().await = Some(executor_ctx.clone());
self.set_executor_ctx(executor_ctx.clone()).await;
drop(executor_ctx);
}
// The executor context lock is dropped here because of the nested scope above.
// This is IMPORTANT.
let filename = params.text_document.uri.to_string();
// Get the current code.
let Some(current_code) = self.current_code_map.get(&filename) else {
return;
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
return;
};
// Get the current ast.
let Some(ast) = self.ast_map.get(&filename) else {
return;
};
let new_params = TextDocumentItem {
uri: params.text_document.uri,
text: std::mem::take(&mut current_code.to_string()),
uri: params.text_document.uri.clone(),
text: std::mem::take(&mut params.text.to_string()),
version: Default::default(),
language_id: Default::default(),
};
self.execute(&new_params, ast.value().clone()).await;
// Force re-execution.
self.inner_on_change(new_params, true).await;
// Check if we have diagnostics.
// If we do we return early, since we failed in some way.
if self.has_diagnostics(params.text_document.uri.as_ref()).await {
return Ok(None);
}
Ok(Some(custom_notifications::UpdateUnitsResponse {}))
}
pub async fn update_can_execute(
&self,
params: custom_notifications::UpdateCanExecuteParams,
) -> RpcResult<custom_notifications::UpdateCanExecuteResponse> {
let can_execute = self.can_execute().await;
if can_execute == params.can_execute {
return Ok(custom_notifications::UpdateCanExecuteResponse {});
}
if !params.can_execute {
// Kill any in progress executions.
if let Some(current_handle) = self.current_handle().await {
current_handle.cancel();
}
}
self.set_can_execute(params.can_execute).await;
Ok(custom_notifications::UpdateCanExecuteResponse {})
}
}
@ -495,7 +656,7 @@ impl LanguageServer for Backend {
..Default::default()
})),
document_formatting_provider: Some(OneOf::Left(true)),
document_symbol_provider: Some(OneOf::Left(true)),
folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
hover_provider: Some(HoverProviderCapability::Simple(true)),
inlay_hint_provider: Some(OneOf::Left(true)),
rename_provider: Some(OneOf::Left(true)),
@ -621,7 +782,7 @@ impl LanguageServer for Backend {
async fn hover(&self, params: HoverParams) -> RpcResult<Option<Hover>> {
let filename = params.text_document_position_params.text_document.uri.to_string();
let Some(current_code) = self.current_code_map.get(&filename) else {
let Some(current_code) = self.code_map.get(&filename).await else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -631,7 +792,7 @@ impl LanguageServer for Backend {
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
// Let's iterate over the AST and find the node that contains the cursor.
let Some(ast) = self.ast_map.get(&filename) else {
let Some(ast) = self.ast_map.get(&filename).await else {
return Ok(None);
};
@ -719,7 +880,7 @@ impl LanguageServer for Backend {
let filename = params.text_document.uri.to_string();
// Get the current diagnostics for this file.
let Some(diagnostic) = self.diagnostics_map.get(&filename) else {
let Some(diagnostic) = self.diagnostics_map.get(&filename).await else {
// Send an empty report.
return Ok(DocumentDiagnosticReportResult::Report(DocumentDiagnosticReport::Full(
RelatedFullDocumentDiagnosticReport {
@ -738,7 +899,7 @@ impl LanguageServer for Backend {
async fn signature_help(&self, params: SignatureHelpParams) -> RpcResult<Option<SignatureHelp>> {
let filename = params.text_document_position_params.text_document.uri.to_string();
let Some(current_code) = self.current_code_map.get(&filename) else {
let Some(current_code) = self.code_map.get(&filename).await else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -748,7 +909,7 @@ impl LanguageServer for Backend {
let pos = position_to_char_index(params.text_document_position_params.position, current_code);
// Let's iterate over the AST and find the node that contains the cursor.
let Some(ast) = self.ast_map.get(&filename) else {
let Some(ast) = self.ast_map.get(&filename).await else {
return Ok(None);
};
@ -796,7 +957,7 @@ impl LanguageServer for Backend {
async fn semantic_tokens_full(&self, params: SemanticTokensParams) -> RpcResult<Option<SemanticTokensResult>> {
let filename = params.text_document.uri.to_string();
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename) else {
let Some(semantic_tokens) = self.semantic_tokens_map.get(&filename).await else {
return Ok(None);
};
@ -809,7 +970,7 @@ impl LanguageServer for Backend {
async fn document_symbol(&self, params: DocumentSymbolParams) -> RpcResult<Option<DocumentSymbolResponse>> {
let filename = params.text_document.uri.to_string();
let Some(symbols) = self.symbols_map.get(&filename) else {
let Some(symbols) = self.symbols_map.get(&filename).await else {
return Ok(None);
};
@ -819,7 +980,7 @@ impl LanguageServer for Backend {
async fn formatting(&self, params: DocumentFormattingParams) -> RpcResult<Option<Vec<TextEdit>>> {
let filename = params.text_document.uri.to_string();
let Some(current_code) = self.current_code_map.get(&filename) else {
let Some(current_code) = self.code_map.get(&filename).await else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -829,7 +990,9 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let tokens = crate::token::lexer(current_code);
let Ok(tokens) = crate::token::lexer(current_code) else {
return Ok(None);
};
let parser = crate::parser::Parser::new(tokens);
let Ok(ast) = parser.ast() else {
return Ok(None);
@ -854,7 +1017,7 @@ impl LanguageServer for Backend {
async fn rename(&self, params: RenameParams) -> RpcResult<Option<WorkspaceEdit>> {
let filename = params.text_document_position.text_document.uri.to_string();
let Some(current_code) = self.current_code_map.get(&filename) else {
let Some(current_code) = self.code_map.get(&filename).await else {
return Ok(None);
};
let Ok(current_code) = std::str::from_utf8(&current_code) else {
@ -864,7 +1027,9 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let tokens = crate::token::lexer(current_code);
let Ok(tokens) = crate::token::lexer(current_code) else {
return Ok(None);
};
let parser = crate::parser::Parser::new(tokens);
let Ok(mut ast) = parser.ast() else {
return Ok(None);
@ -890,6 +1055,24 @@ impl LanguageServer for Backend {
change_annotations: None,
}))
}
async fn folding_range(&self, params: FoldingRangeParams) -> RpcResult<Option<Vec<FoldingRange>>> {
let filename = params.text_document.uri.to_string();
// Get the ast.
let Some(ast) = self.ast_map.get(&filename).await else {
return Ok(None);
};
// Get the folding ranges.
let folding_ranges = ast.get_lsp_folding_ranges();
if folding_ranges.is_empty() {
return Ok(None);
}
Ok(Some(folding_ranges))
}
}
/// Get completions from our stdlib.

View File

@ -1,8 +1,9 @@
//! The servers that power the text editor.
mod backend;
pub mod backend;
pub mod copilot;
pub mod kcl;
mod safemap;
#[cfg(test)]
mod tests;
mod util;
pub mod util;

View File

@ -0,0 +1,60 @@
//! A map type that is safe to use in a concurrent environment.
//! But also in wasm.
//! Previously, we used `dashmap::DashMap` for this purpose, but it doesn't work in wasm.
use std::{borrow::Borrow, collections::HashMap, hash::Hash, sync::Arc};
use tokio::sync::RwLock;
/// A thread-safe map type.
#[derive(Clone, Debug)]
pub struct SafeMap<K: Eq + Hash + Clone, V: Clone>(Arc<RwLock<HashMap<K, V>>>);
impl<K: Eq + Hash + Clone, V: Clone> SafeMap<K, V> {
/// Create a new empty map.
pub fn new() -> Self {
SafeMap(Arc::new(RwLock::new(HashMap::new())))
}
pub async fn len(&self) -> usize {
self.0.read().await.len()
}
pub async fn is_empty(&self) -> bool {
self.0.read().await.is_empty()
}
pub async fn clear(&self) {
self.0.write().await.clear();
}
/// Insert a key-value pair into the map.
pub async fn insert(&self, key: K, value: V) {
self.0.write().await.insert(key, value);
}
/// Get a reference to the value associated with the given key.
pub async fn get<Q>(&self, key: &Q) -> Option<V>
where
K: Borrow<Q>,
Q: Hash + Eq + ?Sized,
{
self.0.read().await.get(key).cloned()
}
/// Remove the key-value pair associated with the given key.
pub async fn remove(&self, key: &K) -> Option<V> {
self.0.write().await.remove(key)
}
/// Get a reference to the underlying map.
pub async fn inner(&self) -> HashMap<K, V> {
self.0.read().await.clone()
}
}
impl<K: Eq + Hash + Clone, V: Clone> Default for SafeMap<K, V> {
fn default() -> Self {
SafeMap::new()
}
}
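The `SafeMap` above is just an `Arc<RwLock<HashMap<K, V>>>`, so every operation is async and hands back clones instead of guards, which is what lets it work on wasm where `DashMap` did not. A minimal usage sketch; the file URI and bytes here are made up for illustration, not taken from the server code:

#[tokio::main]
async fn main() {
    // Hypothetical standalone use of the SafeMap wrapper.
    let map: SafeMap<String, Vec<u8>> = SafeMap::new();
    map.insert("file:///main.kcl".to_string(), b"const x = 1".to_vec()).await;

    // `get` clones the value out rather than returning a reference into the map.
    if let Some(code) = map.get("file:///main.kcl").await {
        assert_eq!(code, b"const x = 1".to_vec());
    }

    assert_eq!(map.len().await, 1);
    map.clear().await;
    assert!(map.is_empty().await);
}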

File diff suppressed because it is too large

View File

@ -26,13 +26,6 @@ impl Parser {
/// Run the parser
pub fn ast(&self) -> Result<Program, KclError> {
if self.tokens.is_empty() {
return Err(KclError::Syntax(KclErrorDetails {
source_ranges: vec![],
message: "file is empty".to_string(),
}));
}
if !self.unknown_tokens.is_empty() {
let source_ranges = self.unknown_tokens.iter().map(SourceRange::from).collect();
let token_list = self.unknown_tokens.iter().map(|t| t.value.as_str()).collect::<Vec<_>>();
@ -44,6 +37,21 @@ impl Parser {
return Err(KclError::Lexical(KclErrorDetails { source_ranges, message }));
}
// Important, to not call this before the unknown tokens check.
if self.tokens.is_empty() {
// Empty file should just do nothing.
return Ok(Program::default());
}
// Check if all the tokens are whitespace or comments.
if self
.tokens
.iter()
.all(|t| t.token_type.is_whitespace() || t.token_type.is_comment())
{
return Ok(Program::default());
}
parser_impl::run_parser(&mut self.tokens.as_slice())
}
}
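With this change an empty or comment-only file is no longer a hard parse error; it simply yields a default, empty `Program`, which is also what the new `serial_test_empty_file_is_ok` executor test further down checks. A small sketch of the new behavior, assuming it runs inside the crate like the parser tests below:

// Sketch: whitespace- and comment-only input now parses to an empty Program.
let tokens = crate::token::lexer("// nothing but a comment\n").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
assert!(program.body.is_empty());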

View File

@ -1395,7 +1395,7 @@ mod tests {
#[test]
fn parse_args() {
for (i, (test, expected_len)) in [("someVar", 1), ("5, 3", 2), (r#""a""#, 1)].into_iter().enumerate() {
let tokens = crate::token::lexer(test);
let tokens = crate::token::lexer(test).unwrap();
let actual = match arguments.parse(&tokens) {
Ok(x) => x,
Err(e) => panic!("Failed test {i}, could not parse function arguments from \"{test}\": {e:?}"),
@ -1406,7 +1406,7 @@ mod tests {
#[test]
fn weird_program_unclosed_paren() {
let tokens = crate::token::lexer("fn firstPrime=(");
let tokens = crate::token::lexer("fn firstPrime=(").unwrap();
let last = tokens.last().unwrap();
let err: KclError = program.parse(&tokens).unwrap_err().into();
assert_eq!(err.source_ranges(), last.as_source_ranges());
@ -1417,7 +1417,7 @@ mod tests {
#[test]
fn weird_program_just_a_pipe() {
let tokens = crate::token::lexer("|");
let tokens = crate::token::lexer("|").unwrap();
let err: KclError = program.parse(&tokens).unwrap_err().into();
assert_eq!(err.source_ranges(), vec![SourceRange([0, 1])]);
assert_eq!(err.message(), "Unexpected token");
@ -1426,7 +1426,7 @@ mod tests {
#[test]
fn parse_binary_expressions() {
for (i, test_program) in ["1 + 2 + 3"].into_iter().enumerate() {
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let mut slice = tokens.as_slice();
let _actual = match binary_expression.parse_next(&mut slice) {
Ok(x) => x,
@ -1437,7 +1437,7 @@ mod tests {
#[test]
fn test_negative_operands() {
let tokens = crate::token::lexer("-leg2");
let tokens = crate::token::lexer("-leg2").unwrap();
let _s = operand.parse_next(&mut tokens.as_slice()).unwrap();
}
@ -1451,7 +1451,7 @@ mod tests {
// comment 2
return 1
}"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let mut slice = tokens.as_slice();
let expr = function_expression.parse_next(&mut slice).unwrap();
assert_eq!(expr.params, vec![]);
@ -1469,7 +1469,7 @@ mod tests {
const yo = { a: { b: { c: '123' } } } /* block
comment */
}"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let mut slice = tokens.as_slice();
let expr = function_expression.parse_next(&mut slice).unwrap();
let comment0 = &expr.body.non_code_meta.non_code_nodes.get(&0).unwrap()[0];
@ -1482,7 +1482,7 @@ comment */
/* comment at start */
const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let program = program.parse(&tokens).unwrap();
let mut starting_comments = program.non_code_meta.start;
assert_eq!(starting_comments.len(), 2);
@ -1500,7 +1500,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_comment_in_pipe() {
let tokens = crate::token::lexer(r#"const x = y() |> /*hi*/ z(%)"#);
let tokens = crate::token::lexer(r#"const x = y() |> /*hi*/ z(%)"#).unwrap();
let mut body = program.parse(&tokens).unwrap().body;
let BodyItem::VariableDeclaration(mut item) = body.remove(0) else {
panic!("expected vardec");
@ -1527,7 +1527,7 @@ const mySk1 = startSketchAt([0, 0])"#;
return sg
return sg
}"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let mut slice = tokens.as_slice();
let _expr = function_expression.parse_next(&mut slice).unwrap();
}
@ -1538,7 +1538,7 @@ const mySk1 = startSketchAt([0, 0])"#;
return 2
}";
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let mut slice = tokens.as_slice();
let expr = function_expression.parse_next(&mut slice).unwrap();
assert_eq!(
@ -1581,7 +1581,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|> c(%) // inline-comment
|> d(%)"#;
let tokens = crate::token::lexer(test_input);
let tokens = crate::token::lexer(test_input).unwrap();
let mut slice = tokens.as_slice();
let PipeExpression {
body, non_code_meta, ..
@ -1608,7 +1608,7 @@ const mySk1 = startSketchAt([0, 0])"#;
return things
"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let Program { non_code_meta, .. } = function_body.parse(&tokens).unwrap();
assert_eq!(
vec![NonCodeNode {
@ -1658,7 +1658,7 @@ const mySk1 = startSketchAt([0, 0])"#;
comment */
return 1"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let actual = program.parse(&tokens).unwrap();
assert_eq!(actual.non_code_meta.non_code_nodes.len(), 1);
assert_eq!(
@ -1673,7 +1673,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_bracketed_binary_expression() {
let input = "(2 - 3)";
let tokens = crate::token::lexer(input);
let tokens = crate::token::lexer(input).unwrap();
let actual = match binary_expr_in_parens.parse(&tokens) {
Ok(x) => x,
Err(e) => panic!("{e:?}"),
@ -1688,7 +1688,7 @@ const mySk1 = startSketchAt([0, 0])"#;
"6 / ( sigmaAllow * width )",
"sqrt(distance * p * FOS * 6 / ( sigmaAllow * width ))",
] {
let tokens = crate::token::lexer(input);
let tokens = crate::token::lexer(input).unwrap();
let _actual = match value.parse(&tokens) {
Ok(x) => x,
Err(e) => panic!("{e:?}"),
@ -1699,7 +1699,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_arithmetic() {
let input = "1 * (2 - 3)";
let tokens = crate::token::lexer(input);
let tokens = crate::token::lexer(input).unwrap();
// The RHS should be a binary expression.
let actual = binary_expression.parse(&tokens).unwrap();
assert_eq!(actual.operator, BinaryOperator::Mul);
@ -1729,7 +1729,7 @@ const mySk1 = startSketchAt([0, 0])"#;
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(test_input);
let tokens = crate::token::lexer(test_input).unwrap();
let mut actual = match declaration.parse(&tokens) {
Err(e) => panic!("Could not parse test {i}: {e:#?}"),
Ok(a) => a,
@ -1747,7 +1747,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_function_call() {
for (i, test_input) in ["const x = f(1)", "const x = f( 1 )"].into_iter().enumerate() {
let tokens = crate::token::lexer(test_input);
let tokens = crate::token::lexer(test_input).unwrap();
let _actual = match declaration.parse(&tokens) {
Err(e) => panic!("Could not parse test {i}: {e:#?}"),
Ok(a) => a,
@ -1758,7 +1758,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_nested_arithmetic() {
let input = "1 * ((2 - 3) / 4)";
let tokens = crate::token::lexer(input);
let tokens = crate::token::lexer(input).unwrap();
// The RHS should be a binary expression.
let outer = binary_expression.parse(&tokens).unwrap();
assert_eq!(outer.operator, BinaryOperator::Mul);
@ -1777,7 +1777,7 @@ const mySk1 = startSketchAt([0, 0])"#;
fn binary_expression_ignores_whitespace() {
let tests = ["1 - 2", "1- 2", "1 -2", "1-2"];
for test in tests {
let tokens = crate::token::lexer(test);
let tokens = crate::token::lexer(test).unwrap();
let actual = binary_expression.parse(&tokens).unwrap();
assert_eq!(actual.operator, BinaryOperator::Sub);
let BinaryPart::Literal(left) = actual.left else {
@ -1798,7 +1798,7 @@ const mySk1 = startSketchAt([0, 0])"#;
a comment
spanning a few lines */
|> z(%)"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let actual = pipe_expression.parse(&tokens).unwrap();
let n = actual.non_code_meta.non_code_nodes.len();
assert_eq!(n, 1, "expected one comment in pipe expression but found {n}");
@ -1826,7 +1826,7 @@ const mySk1 = startSketchAt([0, 0])"#;
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let actual = pipe_expression.parse(&tokens);
assert!(actual.is_ok(), "could not parse test {i}, '{test_program}'");
let actual = actual.unwrap();
@ -1938,7 +1938,7 @@ const mySk1 = startSketchAt([0, 0])"#;
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let actual = non_code_node.parse(&tokens);
assert!(actual.is_ok(), "could not parse test {i}: {actual:#?}");
let actual = actual.unwrap();
@ -1949,7 +1949,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn recognize_invalid_params() {
let test_fn = "(let) => { return 1 }";
let tokens = crate::token::lexer(test_fn);
let tokens = crate::token::lexer(test_fn).unwrap();
let err = function_expression.parse(&tokens).unwrap_err().into_inner();
let cause = err.cause.unwrap();
// This is the token `let`
@ -1962,7 +1962,7 @@ const mySk1 = startSketchAt([0, 0])"#;
let string_literal = r#""
// a comment
""#;
let tokens = crate::token::lexer(string_literal);
let tokens = crate::token::lexer(string_literal).unwrap();
let parsed_literal = literal.parse(&tokens).unwrap();
assert_eq!(
parsed_literal.value,
@ -1979,7 +1979,7 @@ const mySk1 = startSketchAt([0, 0])"#;
|> lineTo([0, -0], %) // MoveRelative
"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let mut slice = &tokens[..];
let _actual = pipe_expression.parse_next(&mut slice).unwrap();
assert_eq!(slice[0].token_type, TokenType::Whitespace);
@ -1988,14 +1988,14 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_pipes_on_pipes() {
let test_program = include_str!("../../../tests/executor/inputs/pipes_on_pipes.kcl");
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let _actual = program.parse(&tokens).unwrap();
}
#[test]
fn test_cube() {
let test_program = include_str!("../../../tests/executor/inputs/cube.kcl");
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
match program.parse(&tokens) {
Ok(_) => {}
Err(e) => {
@ -2013,7 +2013,7 @@ const mySk1 = startSketchAt([0, 0])"#;
("a,b", vec!["a", "b"]),
];
for (i, (input, expected)) in tests.into_iter().enumerate() {
let tokens = crate::token::lexer(input);
let tokens = crate::token::lexer(input).unwrap();
let actual = parameters.parse(&tokens);
assert!(actual.is_ok(), "could not parse test {i}");
let actual_ids: Vec<_> = actual.unwrap().into_iter().map(|p| p.identifier.name).collect();
@ -2027,7 +2027,7 @@ const mySk1 = startSketchAt([0, 0])"#;
return 2
}";
let tokens = crate::token::lexer(input);
let tokens = crate::token::lexer(input).unwrap();
let actual = function_expression.parse(&tokens);
assert!(actual.is_ok(), "could not parse test function");
}
@ -2037,7 +2037,7 @@ const mySk1 = startSketchAt([0, 0])"#;
let tests = ["const myVar = 5", "const myVar=5", "const myVar =5", "const myVar= 5"];
for test in tests {
// Run the original parser
let tokens = crate::token::lexer(test);
let tokens = crate::token::lexer(test).unwrap();
let mut expected_body = crate::parser::Parser::new(tokens.clone()).ast().unwrap().body;
assert_eq!(expected_body.len(), 1);
let BodyItem::VariableDeclaration(expected) = expected_body.pop().unwrap() else {
@ -2064,7 +2064,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_math_parse() {
let tokens = crate::token::lexer(r#"5 + "a""#);
let tokens = crate::token::lexer(r#"5 + "a""#).unwrap();
let actual = crate::parser::Parser::new(tokens).ast().unwrap().body;
let expr = BinaryExpression {
start: 0,
@ -2172,7 +2172,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_abstract_syntax_tree() {
let code = "5 +6";
let parser = crate::parser::Parser::new(crate::token::lexer(code));
let parser = crate::parser::Parser::new(crate::token::lexer(code).unwrap());
let result = parser.ast().unwrap();
let expected_result = Program {
start: 0,
@ -2207,11 +2207,10 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_empty_file() {
let some_program_string = r#""#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("file is empty"));
assert!(result.is_ok());
}
#[test]
@ -2220,7 +2219,8 @@ const mySk1 = startSketchAt([0, 0])"#;
"const secondExtrude = startSketchOn('XY')
|> startProfileAt([0,0], %)
|",
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2229,7 +2229,7 @@ const mySk1 = startSketchAt([0, 0])"#;
#[test]
fn test_parse_member_expression_double_nested_braces() {
let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#);
let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#).unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2239,7 +2239,8 @@ const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = 1 - obj.a"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2249,7 +2250,8 @@ const height = 1 - obj.a"#,
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = 1 - obj["a"]"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2259,7 +2261,8 @@ const height = 1 - obj["a"]"#,
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = obj["a"] - 1"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2269,7 +2272,8 @@ const height = obj["a"] - 1"#,
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [1 - obj["a"], 0]"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2279,7 +2283,8 @@ const height = [1 - obj["a"], 0]"#,
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] - 1, 0]"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2289,7 +2294,8 @@ const height = [obj["a"] - 1, 0]"#,
let tokens = crate::token::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] -1, 0]"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2310,7 +2316,8 @@ const firstExtrude = startSketchOn('XY')
const secondExtrude = startSketchOn('XY')
|> startProfileAt([0,0], %)
|",
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2319,7 +2326,7 @@ const secondExtrude = startSketchOn('XY')
#[test]
fn test_parse_greater_bang() {
let tokens = crate::token::lexer(">!");
let tokens = crate::token::lexer(">!").unwrap();
let parser = crate::parser::Parser::new(tokens);
let err = parser.ast().unwrap_err();
assert_eq!(
@ -2330,21 +2337,26 @@ const secondExtrude = startSketchOn('XY')
#[test]
fn test_parse_z_percent_parens() {
let tokens = crate::token::lexer("z%)");
let tokens = crate::token::lexer("z%)").unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("Unexpected token"));
assert_eq!(
result.err().unwrap().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([1, 2])], message: "Unexpected token" }"#
);
}
#[test]
fn test_parse_parens_unicode() {
let tokens = crate::token::lexer("");
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let result = crate::token::lexer("");
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"lexical: KclErrorDetails { source_ranges: [SourceRange([1, 2])], message: "found unknown token 'ޜ'" }"#
);
}
#[test]
@ -2355,7 +2367,8 @@ const thickness = 0.56
const bracket = [-leg2 + thickness, 0]
"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_ok());
@ -2366,7 +2379,8 @@ const bracket = [-leg2 + thickness, 0]
let tokens = crate::token::lexer(
r#"
z(-[["#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2377,7 +2391,8 @@ z(-[["#,
let tokens = crate::token::lexer(
r#"z
(--#"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2389,7 +2404,7 @@ z(-[["#,
#[test]
fn test_parse_weird_lots_of_fancy_brackets() {
let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2405,7 +2420,8 @@ z(-[["#,
r#"fn)n
e
["#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2418,7 +2434,7 @@ e
#[test]
fn test_parse_weird_close_before_nada() {
let tokens = crate::token::lexer(r#"fn)n-"#);
let tokens = crate::token::lexer(r#"fn)n-"#).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2434,7 +2450,8 @@ e
let tokens = crate::token::lexer(
r#"J///////////o//+///////////P++++*++++++P///////˟
++4"#,
);
)
.unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2526,7 +2543,7 @@ e
#[test]
fn test_parse_expand_array() {
let code = "const myArray = [0..10]";
let parser = crate::parser::Parser::new(crate::token::lexer(code));
let parser = crate::parser::Parser::new(crate::token::lexer(code).unwrap());
let result = parser.ast().unwrap();
let expected_result = Program {
start: 0,
@ -2626,7 +2643,7 @@ e
#[test]
fn test_error_keyword_in_variable() {
let some_program_string = r#"const let = "thing""#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2639,7 +2656,7 @@ e
#[test]
fn test_error_keyword_in_fn_name() {
let some_program_string = r#"fn let = () {}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2654,7 +2671,7 @@ e
let some_program_string = r#"fn cos = () => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2669,7 +2686,7 @@ e
let some_program_string = r#"fn thing = (let) => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2684,7 +2701,7 @@ e
let some_program_string = r#"fn thing = (cos) => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2702,7 +2719,7 @@ e
}
firstPrimeNumber()
"#;
let tokens = crate::token::lexer(program);
let tokens = crate::token::lexer(program).unwrap();
let parser = crate::parser::Parser::new(tokens);
let _ast = parser.ast().unwrap();
}
@ -2715,7 +2732,7 @@ e
thing(false)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2732,7 +2749,7 @@ thing(false)
"#,
name
);
let tokens = crate::token::lexer(&some_program_string);
let tokens = crate::token::lexer(&some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2750,7 +2767,7 @@ thing(false)
#[test]
fn test_error_define_var_as_function() {
let some_program_string = r#"fn thing = "thing""#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -2773,7 +2790,7 @@ thing(false)
|> line([-5.09, 12.33], %)
asdasd
"#;
let tokens = crate::token::lexer(test_program);
let tokens = crate::token::lexer(test_program).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let _e = result.unwrap_err();
@ -2797,7 +2814,7 @@ const b2 = cube([3,3], 4)
const pt1 = b1[0]
const pt2 = b2[0]
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2806,7 +2823,7 @@ const pt2 = b2[0]
fn test_math_with_stdlib() {
let some_program_string = r#"const d2r = pi() / 2
let other_thing = 2 * cos(3)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2826,7 +2843,7 @@ let other_thing = 2 * cos(3)"#;
}
let myBox = box([0,0], -3, -16, -10)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -2836,7 +2853,7 @@ let myBox = box([0,0], -3, -16, -10)
foo()
|> bar(2)
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let err = parser.ast().unwrap_err();
println!("{err}")
@ -2854,7 +2871,7 @@ mod snapshot_math_tests {
($func_name:ident, $test_kcl_program:expr) => {
#[test]
fn $func_name() {
let tokens = crate::token::lexer($test_kcl_program);
let tokens = crate::token::lexer($test_kcl_program).unwrap();
let actual = match binary_expression.parse(&tokens) {
Ok(x) => x,
Err(_e) => panic!("could not parse test"),
@ -2888,7 +2905,7 @@ mod snapshot_tests {
($func_name:ident, $test_kcl_program:expr) => {
#[test]
fn $func_name() {
let tokens = crate::token::lexer($test_kcl_program);
let tokens = crate::token::lexer($test_kcl_program).unwrap();
let actual = match program.parse(&tokens) {
Ok(x) => x,
Err(e) => panic!("could not parse test: {e:?}"),

View File

@ -1,7 +1,11 @@
use winnow::error::{ErrorKind, ParseError, StrContext};
use winnow::{
error::{ErrorKind, ParseError, StrContext},
Located,
};
use crate::{
errors::{KclError, KclErrorDetails},
executor::SourceRange,
token::Token,
};
@ -15,6 +19,22 @@ pub struct ContextError<C = StrContext> {
pub cause: Option<KclError>,
}
impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
fn from(err: ParseError<Located<&str>, winnow::error::ContextError>) -> Self {
let (input, offset): (Vec<char>, usize) = (err.input().chars().collect(), err.offset());
// TODO: Add the Winnow tokenizer context to the error.
// See https://github.com/KittyCAD/modeling-app/issues/784
let bad_token = &input[offset];
// TODO: Add the Winnow parser context to the error.
// See https://github.com/KittyCAD/modeling-app/issues/784
KclError::Lexical(KclErrorDetails {
source_ranges: vec![SourceRange([offset, offset + 1])],
message: format!("found unknown token '{}'", bad_token),
})
}
}
impl From<ParseError<&[Token], ContextError>> for KclError {
fn from(err: ParseError<&[Token], ContextError>) -> Self {
let Some(last_token) = err.input().last() else {
@ -47,7 +67,7 @@ impl From<ParseError<&[Token], ContextError>> for KclError {
// See https://github.com/KittyCAD/modeling-app/issues/784
KclError::Syntax(KclErrorDetails {
source_ranges: bad_token.as_source_ranges(),
message: "Unexpected token".to_owned(),
message: "Unexpected token".to_string(),
})
}
}

View File

@ -72,7 +72,7 @@ impl Serialize for Box<dyn KclStdLibFn> {
/// Return the program and its single function.
/// Return None if those expectations aren't met.
pub fn extract_function(source: &str) -> Option<(Program, Box<FunctionExpression>)> {
let tokens = lexer(source);
let tokens = lexer(source).unwrap();
let src = crate::parser::Parser::new(tokens).ast().ok()?;
assert_eq!(src.body.len(), 1);
let BodyItem::ExpressionStatement(expr) = src.body.last()? else {

View File

@ -961,54 +961,25 @@ async fn start_sketch_on_face(
async fn start_sketch_on_plane(data: PlaneData, args: Args) -> Result<Box<Plane>, KclError> {
let mut plane: Plane = data.clone().into();
let id = uuid::Uuid::new_v4();
let default_origin = Point3D { x: 0.0, y: 0.0, z: 0.0 };
let (x_axis, y_axis) = match data {
PlaneData::XY => (Point3D { x: 1.0, y: 0.0, z: 0.0 }, Point3D { x: 0.0, y: 1.0, z: 0.0 }),
PlaneData::NegXY => (
Point3D {
x: -1.0,
y: 0.0,
z: 0.0,
},
Point3D { x: 0.0, y: 1.0, z: 0.0 },
),
PlaneData::XZ => (
Point3D {
x: -1.0,
y: 0.0,
z: 0.0,
},
Point3D { x: 0.0, y: 0.0, z: 1.0 },
), // TODO x component for x_axis shouldn't be negative
PlaneData::NegXZ => (
Point3D {
x: 1.0, // TODO this should be -1.0
y: 0.0,
z: 0.0,
},
Point3D { x: 0.0, y: 0.0, z: 1.0 },
),
PlaneData::YZ => (Point3D { x: 0.0, y: 1.0, z: 0.0 }, Point3D { x: 0.0, y: 0.0, z: 1.0 }),
PlaneData::NegYZ => (
Point3D {
x: 0.0,
y: -1.0,
z: 0.0,
},
Point3D { x: 0.0, y: 0.0, z: 1.0 },
),
_ => (Point3D { x: 1.0, y: 0.0, z: 0.0 }, Point3D { x: 0.0, y: 1.0, z: 0.0 }),
};
// Get the default planes.
let default_planes = args.ctx.engine.default_planes(args.source_range).await?;
plane.id = match data {
PlaneData::XY => default_planes.xy,
PlaneData::XZ => default_planes.xz,
PlaneData::YZ => default_planes.yz,
PlaneData::NegXY => default_planes.neg_xy,
PlaneData::NegXZ => default_planes.neg_xz,
PlaneData::NegYZ => default_planes.neg_yz,
PlaneData::Plane {
origin,
x_axis,
y_axis,
z_axis: _,
} => {
// Create the custom plane on the fly.
let id = uuid::Uuid::new_v4();
args.send_modeling_cmd(
id,
ModelingCmd::MakePlane {
@ -1021,21 +992,7 @@ async fn start_sketch_on_plane(data: PlaneData, args: Args) -> Result<Box<Plane>
},
)
.await?;
id
}
_ => {
args.send_modeling_cmd(
id,
ModelingCmd::MakePlane {
clobber: false,
origin: default_origin,
size: 60.0,
x_axis,
y_axis,
hide: Some(true),
},
)
.await?;
id
}
};

View File

@ -0,0 +1,26 @@
//! Local implementation of threads with tokio.
pub struct JoinHandle {
inner: tokio::task::JoinHandle<()>,
}
impl JoinHandle {
pub fn new<F>(future: F) -> Self
where
F: std::future::Future<Output = ()> + Send + 'static,
{
Self {
inner: tokio::spawn(future),
}
}
}
impl crate::thread::Thread for JoinHandle {
fn abort(&self) {
self.inner.abort();
}
fn is_finished(&self) -> bool {
self.inner.is_finished()
}
}

View File

@ -0,0 +1,22 @@
//! An implementation of threads that works in wasm with promises and other platforms with tokio.
#[cfg(not(target_arch = "wasm32"))]
pub mod local;
#[cfg(not(target_arch = "wasm32"))]
pub use local::JoinHandle;
#[cfg(target_arch = "wasm32")]
#[cfg(not(test))]
pub mod wasm;
#[cfg(target_arch = "wasm32")]
#[cfg(not(test))]
pub use wasm::JoinHandle;
pub trait Thread {
/// Abort a thread.
fn abort(&self);
/// Check if a thread is finished.
fn is_finished(&self) -> bool;
}
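The `Thread` trait gives the LSP backend a single abort/is_finished interface whether the handle wraps a tokio task (native) or a promise (wasm). A minimal sketch of holding a handle through the trait on a native target, assuming a tokio runtime is available; the background future here is a stand-in:

use crate::thread::Thread;

#[tokio::main]
async fn main() {
    let handle = crate::thread::JoinHandle::new(async {
        // Stand-in for long-running background work.
        std::future::pending::<()>().await;
    });

    // Callers only see the Thread trait, so cancellation looks the same on
    // native (tokio's JoinHandle::abort) and on wasm (dropping the promise).
    let task: Box<dyn Thread> = Box::new(handle);
    if !task.is_finished() {
        task.abort();
    }
}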

View File

@ -0,0 +1,36 @@
//! Implementation of thread using Promise for wasm.
pub struct JoinHandle {
inner: Option<crate::wasm::Promise>,
}
impl JoinHandle {
pub fn new<F>(future: F) -> Self
where
F: std::future::Future<Output = ()> + Send + 'static,
{
Self {
inner: Some(
wasm_bindgen_futures::future_to_promise(async move {
future.await;
Ok(wasm_bindgen::JsValue::NULL)
})
.into(),
),
}
}
}
impl crate::thread::Thread for JoinHandle {
fn abort(&self) {
if let Some(promise) = &self.inner {
let future = crate::wasm::JsFuture::from(promise.0.as_ref().unwrap().clone());
drop(future);
}
}
fn is_finished(&self) -> bool {
// No-op for now; we don't currently need this on wasm.
true
}
}

View File

@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;
use winnow::stream::ContainsToken;
use crate::{ast::types::VariableKind, executor::SourceRange};
use crate::{ast::types::VariableKind, errors::KclError, executor::SourceRange};
mod tokeniser;
@ -125,6 +125,14 @@ impl TokenType {
Ok(semantic_tokens)
}
pub fn is_whitespace(&self) -> bool {
matches!(self, Self::Whitespace)
}
pub fn is_comment(&self) -> bool {
matches!(self, Self::LineComment | Self::BlockComment)
}
}
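These two predicates are what the parser's new comment/whitespace-only check relies on. A trivial sketch of the same filter written as a standalone helper next to these predicates (the function name is hypothetical):

// Sketch: decide whether a token stream contains anything executable.
fn only_trivia(tokens: &[Token]) -> bool {
    tokens
        .iter()
        .all(|t| t.token_type.is_whitespace() || t.token_type.is_comment())
}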
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
@ -204,8 +212,8 @@ impl From<&Token> for SourceRange {
}
}
pub fn lexer(s: &str) -> Vec<Token> {
tokeniser::lexer(s).unwrap_or_default()
pub fn lexer(s: &str) -> Result<Vec<Token>, KclError> {
tokeniser::lexer(s).map_err(From::from)
}
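Since `lexer` now returns a `Result` instead of silently swallowing tokenizer failures, callers choose whether to bubble the `KclError` up (as the tests and executor do with `?`/`unwrap`) or convert it into a diagnostic (as `inner_on_change` does). A small sketch of the propagating pattern:

// Sketch: propagate lexer failures with `?` instead of getting an empty token list.
fn parse_source(source: &str) -> Result<crate::ast::types::Program, crate::errors::KclError> {
    let tokens = crate::token::lexer(source)?;
    let parser = crate::parser::Parser::new(tokens);
    parser.ast()
}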
#[cfg(test)]

View File

@ -5,7 +5,7 @@ use std::{
};
/// A JsFuture that implements Send and Sync.
pub struct JsFuture(pub wasm_bindgen_futures::JsFuture);
pub struct JsFuture(pub Option<wasm_bindgen_futures::JsFuture>);
// Safety: WebAssembly will only ever run in a single-threaded context.
unsafe impl Send for JsFuture {}
@ -15,13 +15,52 @@ impl std::future::Future for JsFuture {
type Output = Result<wasm_bindgen::JsValue, wasm_bindgen::JsValue>;
fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
let mut pinned: Pin<&mut wasm_bindgen_futures::JsFuture> = Pin::new(&mut self.get_mut().0);
pinned.as_mut().poll(cx)
if let Some(future) = &mut self.get_mut().0 {
let mut pinned = std::pin::pin!(future);
match pinned.as_mut().poll(cx) {
Poll::Ready(Ok(value)) => Poll::Ready(Ok(value)),
Poll::Ready(Err(err)) => Poll::Ready(Err(err)),
Poll::Pending => Poll::Pending,
}
} else {
Poll::Ready(Err(wasm_bindgen::JsValue::from_str("Future has already been dropped")))
}
}
}
impl Drop for JsFuture {
fn drop(&mut self) {
if let Some(t) = self.0.take() {
drop(t);
}
}
}
impl From<js_sys::Promise> for JsFuture {
fn from(promise: js_sys::Promise) -> JsFuture {
JsFuture(wasm_bindgen_futures::JsFuture::from(promise))
JsFuture(Some(wasm_bindgen_futures::JsFuture::from(promise)))
}
}
/// A Promise that implements Send and Sync.
pub struct Promise(pub Option<js_sys::Promise>);
// Safety: WebAssembly will only ever run in a single-threaded context.
unsafe impl Send for Promise {}
unsafe impl Sync for Promise {}
impl From<js_sys::Promise> for Promise {
fn from(promise: js_sys::Promise) -> Promise {
Promise(Some(promise))
}
}
impl Drop for Promise {
fn drop(&mut self) {
// Turn it into a future and drop it.
if let Some(t) = self.0.take() {
let future = wasm_bindgen_futures::JsFuture::from(t);
drop(future);
}
}
}

View File

@ -24,7 +24,6 @@ pub async fn execute_wasm(
console_error_panic_hook::set_once();
// deserialize the ast from a stringified json
use kcl_lib::executor::ExecutorContext;
let program: kcl_lib::ast::types::Program = serde_json::from_str(program_str).map_err(|e| e.to_string())?;
let memory: kcl_lib::executor::ProgramMemory = serde_json::from_str(memory_str).map_err(|e| e.to_string())?;
let units = kittycad::types::UnitLength::from_str(units).map_err(|e| e.to_string())?;
@ -32,8 +31,8 @@ pub async fn execute_wasm(
let engine = kcl_lib::engine::conn_wasm::EngineConnection::new(engine_manager)
.await
.map_err(|e| format!("{:?}", e))?;
let fs = kcl_lib::fs::FileManager::new(fs_manager);
let ctx = ExecutorContext {
let fs = Arc::new(kcl_lib::fs::FileManager::new(fs_manager));
let ctx = kcl_lib::executor::ExecutorContext {
engine: Arc::new(Box::new(engine)),
fs,
stdlib: std::sync::Arc::new(kcl_lib::std::StdLib::new()),
@ -47,6 +46,27 @@ pub async fn execute_wasm(
JsValue::from_serde(&memory).map_err(|e| e.to_string())
}
// wasm_bindgen wrapper for creating default planes
#[wasm_bindgen]
pub async fn make_default_planes(
engine_manager: kcl_lib::engine::conn_wasm::EngineCommandManager,
) -> Result<JsValue, String> {
console_error_panic_hook::set_once();
// Create the engine connection so we can ask it for the default planes.
let engine = kcl_lib::engine::conn_wasm::EngineConnection::new(engine_manager)
.await
.map_err(|e| format!("{:?}", e))?;
let default_planes = engine
.new_default_planes(Default::default())
.await
.map_err(String::from)?;
// serde-wasm-bindgen does not work here because of HashMap serialization issues, so we use
// the gloo-serialize crate instead.
JsValue::from_serde(&default_planes).map_err(|e| e.to_string())
}
// wasm_bindgen wrapper for execute
#[wasm_bindgen]
pub async fn modify_ast_for_sketch_wasm(
@ -109,7 +129,7 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
console_error_panic_hook::set_once();
let tokens = kcl_lib::token::lexer(js);
let tokens = kcl_lib::token::lexer(js).map_err(JsError::from)?;
Ok(JsValue::from_serde(&tokens)?)
}
@ -117,7 +137,7 @@ pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
pub fn parse_wasm(js: &str) -> Result<JsValue, String> {
console_error_panic_hook::set_once();
let tokens = kcl_lib::token::lexer(js);
let tokens = kcl_lib::token::lexer(js).map_err(String::from)?;
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast().map_err(String::from)?;
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
@ -170,7 +190,13 @@ impl ServerConfig {
// NOTE: input needs to be an AsyncIterator<Uint8Array, never, void> specifically
#[wasm_bindgen]
pub async fn kcl_lsp_run(config: ServerConfig, token: String, is_dev: bool) -> Result<(), JsValue> {
pub async fn kcl_lsp_run(
config: ServerConfig,
engine_manager: kcl_lib::engine::conn_wasm::EngineCommandManager,
units: &str,
token: String,
is_dev: bool,
) -> Result<(), JsValue> {
console_error_panic_hook::set_once();
let ServerConfig {
@ -190,6 +216,22 @@ pub async fn kcl_lsp_run(config: ServerConfig, token: String, is_dev: bool) -> R
if is_dev {
zoo_client.set_base_url("https://api.dev.zoo.dev");
}
let file_manager = Arc::new(kcl_lib::fs::FileManager::new(fs));
let units = kittycad::types::UnitLength::from_str(units).map_err(|e| e.to_string())?;
let engine = kcl_lib::engine::conn_wasm::EngineConnection::new(engine_manager)
.await
.map_err(|e| format!("{:?}", e))?;
// Turn off lsp execute for now
let _executor_ctx = kcl_lib::executor::ExecutorContext {
engine: Arc::new(Box::new(engine)),
fs: file_manager.clone(),
stdlib: std::sync::Arc::new(stdlib),
units,
is_mock: false,
};
// Check if we can send telemetry for this user.
let privacy_settings = match zoo_client.users().get_privacy_settings().await {
Ok(privacy_settings) => privacy_settings,
@ -210,7 +252,7 @@ pub async fn kcl_lsp_run(config: ServerConfig, token: String, is_dev: bool) -> R
let (service, socket) = LspService::build(|client| kcl_lib::lsp::kcl::Backend {
client,
fs: kcl_lib::fs::FileManager::new(fs),
fs: file_manager,
workspace_folders: Default::default(),
stdlib_completions,
stdlib_signatures,
@ -218,15 +260,20 @@ pub async fn kcl_lsp_run(config: ServerConfig, token: String, is_dev: bool) -> R
token_map: Default::default(),
ast_map: Default::default(),
memory_map: Default::default(),
current_code_map: Default::default(),
code_map: Default::default(),
diagnostics_map: Default::default(),
symbols_map: Default::default(),
semantic_tokens_map: Default::default(),
zoo_client,
can_send_telemetry: privacy_settings.can_train_on_data,
executor_ctx: Default::default(),
can_execute: Default::default(),
is_initialized: Default::default(),
current_handle: Default::default(),
})
.custom_method("kcl/updateUnits", kcl_lib::lsp::kcl::Backend::update_units)
.custom_method("kcl/updateCanExecute", kcl_lib::lsp::kcl::Backend::update_can_execute)
.finish();
let input = wasm_bindgen_futures::stream::JsStream::from(into_server);
@ -271,15 +318,20 @@ pub async fn copilot_lsp_run(config: ServerConfig, token: String, is_dev: bool)
zoo_client.set_base_url("https://api.dev.zoo.dev");
}
let file_manager = Arc::new(kcl_lib::fs::FileManager::new(fs));
let (service, socket) = LspService::build(|client| kcl_lib::lsp::copilot::Backend {
client,
fs: kcl_lib::fs::FileManager::new(fs),
fs: file_manager,
workspace_folders: Default::default(),
current_code_map: Default::default(),
code_map: Default::default(),
editor_info: Arc::new(RwLock::new(kcl_lib::lsp::copilot::types::CopilotEditorInfo::default())),
cache: Arc::new(kcl_lib::lsp::copilot::cache::CopilotCache::new()),
telemetry: Default::default(),
zoo_client,
is_initialized: Default::default(),
current_handle: Default::default(),
})
.custom_method("copilot/setEditorInfo", kcl_lib::lsp::copilot::Backend::set_editor_info)
.custom_method(

View File

@ -35,7 +35,7 @@ async fn execute_and_snapshot(code: &str, units: kittycad::types::UnitLength) ->
// Create a temporary file to write the output to.
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
let tokens = kcl_lib::token::lexer(code);
let tokens = kcl_lib::token::lexer(code)?;
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let ctx = kcl_lib::executor::ExecutorContext::new(ws, units.clone()).await?;
@ -1927,6 +1927,14 @@ const plumbus0 = make_circle(p, 'a', [0, 0], 2.5)
twenty_twenty::assert_image("tests/executor/outputs/plumbus_fillets.png", &result, 1.0);
}
#[tokio::test(flavor = "multi_thread")]
async fn serial_test_empty_file_is_ok() {
let code = r#""#;
let result = execute_and_snapshot(code, kittycad::types::UnitLength::Mm).await;
assert!(result.is_ok());
}
#[tokio::test(flavor = "multi_thread")]
async fn serial_test_member_expression_in_params() {
let code = r#"fn capScrew = (originStart, length, dia, capDia, capHeadLength) => {

View File

@ -36,7 +36,7 @@ async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Program, uuid
.commands_ws(None, None, None, None, None, None, Some(false))
.await?;
let tokens = kcl_lib::token::lexer(code);
let tokens = kcl_lib::token::lexer(code)?;
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let ctx = kcl_lib::executor::ExecutorContext::new(ws, kittycad::types::UnitLength::Mm).await?;