Compare commits


1 Commit

Author SHA1 Message Date
534d5431d4 Only run regex once per token 2023-09-21 16:49:34 -05:00
52 changed files with 1130 additions and 2304 deletions

View File

@ -123,24 +123,13 @@ Before you submit a contribution PR to this repo, please ensure that:
## Release a new version
1. Bump the versions in the .json files by creating a `Cut release v{x}.{y}.{z}` PR, committing the changes from
1. Bump the versions in the .json files by creating a `Bump to v{x}.{y}.{z}` PR, committing the changes from
```bash
VERSION=x.y.z yarn run bump-jsons
```
The PR may serve as a place to discuss the human-readable changelog and extra QA. A quick way of collecting the PRs merged since the last bump is to [use this PR filter](https://github.com/KittyCAD/modeling-app/pulls?q=is%3Apr+sort%3Aupdated-desc+is%3Amerged+), open up the browser console, and paste in the following
```typescript
console.log(
  '- ' +
    Array.from(
      document.querySelectorAll('[data-hovercard-type="pull_request"]')
    )
      .map((a) => `[${a.innerText}](${a.href})`)
      .join('\n- ')
)
```
Grab the resulting Markdown list and delete any entries older than the last bump.
The PR may serve as a place to discuss the human-readable changelog and extra QA.
2. Merge the PR

View File

@ -1,6 +1,6 @@
{
"name": "untitled-app",
"version": "0.9.2",
"version": "0.9.0",
"private": true,
"dependencies": {
"@codemirror/autocomplete": "^6.9.0",
@ -10,7 +10,7 @@
"@fortawesome/react-fontawesome": "^0.2.0",
"@headlessui/react": "^1.7.13",
"@headlessui/tailwindcss": "^0.2.0",
"@kittycad/lib": "^0.0.39",
"@kittycad/lib": "^0.0.38",
"@lezer/javascript": "^1.4.7",
"@open-rpc/client-js": "^1.8.1",
"@react-hook/resize-observer": "^1.2.6",
@ -27,7 +27,6 @@
"@uiw/react-codemirror": "^4.21.13",
"@xstate/react": "^3.2.2",
"crypto-js": "^4.1.1",
"debounce-promise": "^3.1.2",
"formik": "^2.4.3",
"fuse.js": "^6.6.2",
"http-server": "^14.1.1",
@ -102,7 +101,7 @@
"@babel/preset-env": "^7.22.9",
"@tauri-apps/cli": "^1.3.1",
"@types/crypto-js": "^4.1.1",
"@types/debounce-promise": "^3.1.6",
"@types/debounce": "^1.2.1",
"@types/isomorphic-fetch": "^0.0.36",
"@types/react-modal": "^3.16.0",
"@types/uuid": "^9.0.1",

src-tauri/Cargo.lock (generated, 51 changed lines)
View File

@ -155,20 +155,6 @@ version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
[[package]]
name = "bigdecimal"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "454bca3db10617b88b566f205ed190aedb0e0e6dd4cad61d3988a72e8c5594cb"
dependencies = [
"autocfg",
"libm",
"num-bigint",
"num-integer",
"num-traits",
"serde",
]
[[package]]
name = "bincode"
version = "1.3.3"
@ -1644,14 +1630,13 @@ dependencies = [
[[package]]
name = "kittycad"
version = "0.2.26"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2623ee601ce203476229df3f9d3a14664cb43e3f7455e9ac8ed91aacaa6163d"
checksum = "d9cf962b1e81a0b4eb923a727e761b40672cbacc7f5f0b75e13579d346352bc7"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.2",
"bigdecimal",
"bytes",
"chrono",
"data-encoding",
@ -1703,12 +1688,6 @@ version = "0.2.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b"
[[package]]
name = "libm"
version = "0.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4"
[[package]]
name = "line-wrap"
version = "0.1.1"
@ -1980,17 +1959,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "num-bigint"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.45"
@ -2014,9 +1982,9 @@ dependencies = [
[[package]]
name = "num-traits"
version = "0.2.16"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2"
checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
dependencies = [
"autocfg",
]
@ -3054,11 +3022,10 @@ dependencies = [
[[package]]
name = "schemars"
version = "0.8.15"
version = "0.8.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c"
checksum = "763f8cd0d4c71ed8389c90cb8100cba87e763bd01a8e614d4f0af97bcd50a161"
dependencies = [
"bigdecimal",
"bytes",
"chrono",
"dyn-clone",
@ -3071,9 +3038,9 @@ dependencies = [
[[package]]
name = "schemars_derive"
version = "0.8.15"
version = "0.8.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c"
checksum = "ec0f696e21e10fa546b7ffb1c9672c6de8fbc7a81acf59524386d8639bf12737"
dependencies = [
"proc-macro2",
"quote",
@ -3775,7 +3742,7 @@ dependencies = [
[[package]]
name = "tauri-plugin-fs-extra"
version = "0.0.0"
source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#b04bde3461066c709d6801cf9ca305cf889a8394"
source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#76832e60bfba44c24d6af8a5099be123886ba63d"
dependencies = [
"log",
"serde",

View File

@ -16,7 +16,7 @@ tauri-build = { version = "1.4.0", features = [] }
[dependencies]
anyhow = "1"
kittycad = "0.2.26"
kittycad = "0.2.25"
oauth2 = "4.4.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -8,7 +8,7 @@
},
"package": {
"productName": "kittycad-modeling",
"version": "0.9.2"
"version": "0.9.0"
},
"tauri": {
"allowlist": {

View File

@ -31,7 +31,6 @@ import { TextEditor } from 'components/TextEditor'
import { Themes, getSystemTheme } from 'lib/theme'
import { useSetupEngineManager } from 'hooks/useSetupEngineManager'
import { useEngineConnectionSubscriptions } from 'hooks/useEngineConnectionSubscriptions'
import { engineCommandManager } from './lang/std/engineConnection'
export function App() {
const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
@ -40,6 +39,7 @@ export function App() {
useHotKeyListener()
const {
setCode,
engineCommandManager,
buttonDownInStream,
openPanes,
setOpenPanes,
@ -52,6 +52,7 @@ export function App() {
guiMode: s.guiMode,
setGuiMode: s.setGuiMode,
setCode: s.setCode,
engineCommandManager: s.engineCommandManager,
buttonDownInStream: s.buttonDownInStream,
openPanes: s.openPanes,
setOpenPanes: s.setOpenPanes,
@ -90,12 +91,12 @@ export function App() {
if (guiMode.sketchMode === 'sketchEdit') {
// TODO: share this with Toolbar's "Exit sketch" button
// exiting sketch should be done consistently across all exits
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: { type: 'edit_mode_exit' },
})
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: { type: 'default_camera_disable_sketch_mode' },
@ -106,7 +107,7 @@ export function App() {
// when exiting sketch mode in the future
executeAst()
} else {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
@ -155,7 +156,7 @@ export function App() {
useEngineConnectionSubscriptions()
const debounceSocketSend = throttle<EngineCommand>((message) => {
engineCommandManager.sendSceneCommand(message)
engineCommandManager?.sendSceneCommand(message)
}, 16)
const handleMouseMove: MouseEventHandler<HTMLDivElement> = (e) => {
e.nativeEvent.preventDefault()
@ -215,6 +216,7 @@ export function App() {
} else if (interactionGuards.zoom.dragCallback(eWithButton)) {
interaction = 'zoom'
} else {
console.log('none')
return
}
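
Despite its name, the `debounceSocketSend` in this file is a throttle: at most one scene command goes out per 16 ms window, roughly once per frame at 60 fps, so rapid mouse-move events collapse into a steady stream. A minimal sketch of a trailing-edge throttle with that shape, assuming the app's own `throttle` helper behaves similarly (names here are illustrative):

```typescript
// Trailing-edge throttle sketch: the latest argument within each `wait`
// window wins, so bursts of mouse-move events collapse to ~60 sends/s.
function throttle<T>(fn: (arg: T) => void, wait: number): (arg: T) => void {
  let timer: ReturnType<typeof setTimeout> | undefined
  let latest!: T
  return (arg: T) => {
    latest = arg
    if (timer !== undefined) return // a send is already scheduled
    timer = setTimeout(() => {
      timer = undefined
      fn(latest) // fire with the most recent value in the window
    }, wait)
  }
}

// Usage mirroring the diff (EngineCommand / sendSceneCommand assumed):
// const debounceSocketSend = throttle<EngineCommand>((message) => {
//   engineCommandManager?.sendSceneCommand(message)
// }, 16)
```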

View File

@ -18,7 +18,6 @@ import styles from './Toolbar.module.css'
import { v4 as uuidv4 } from 'uuid'
import { useAppMode } from 'hooks/useAppMode'
import { ActionIcon } from 'components/ActionIcon'
import { engineCommandManager } from './lang/std/engineConnection'
export const sketchButtonClassnames = {
background:
@ -51,6 +50,7 @@ export const Toolbar = () => {
ast,
updateAst,
programMemory,
engineCommandManager,
executeAst,
} = useStore((s) => ({
guiMode: s.guiMode,
@ -59,10 +59,15 @@ export const Toolbar = () => {
ast: s.ast,
updateAst: s.updateAst,
programMemory: s.programMemory,
engineCommandManager: s.engineCommandManager,
executeAst: s.executeAst,
}))
useAppMode()
useEffect(() => {
console.log('guiMode', guiMode)
}, [guiMode])
function ToolbarButtons({ className }: React.HTMLAttributes<HTMLElement>) {
return (
<span className={styles.toolbarButtons + ' ' + className}>
@ -168,12 +173,12 @@ export const Toolbar = () => {
{guiMode.mode === 'sketch' && (
<button
onClick={() => {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: { type: 'edit_mode_exit' },
})
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: { type: 'default_camera_disable_sketch_mode' },
@ -209,7 +214,7 @@ export const Toolbar = () => {
<button
key={sketchFnName}
onClick={() => {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {

View File

@ -10,7 +10,6 @@ import {
} from '../lang/modifyAst'
import { findAllPreviousVariables, PrevVariable } from '../lang/queryAst'
import { useStore } from '../useStore'
import { engineCommandManager } from '../lang/std/engineConnection'
export const AvailableVars = ({
onVarClick,
@ -93,11 +92,14 @@ export function useCalc({
newVariableInsertIndex: number
setNewVariableName: (a: string) => void
} {
const { ast, programMemory, selectionRange } = useStore((s) => ({
ast: s.ast,
programMemory: s.programMemory,
selectionRange: s.selectionRanges.codeBasedSelections[0].range,
}))
const { ast, programMemory, selectionRange, engineCommandManager } = useStore(
(s) => ({
ast: s.ast,
programMemory: s.programMemory,
selectionRange: s.selectionRanges.codeBasedSelections[0].range,
engineCommandManager: s.engineCommandManager,
})
)
const inputRef = useRef<HTMLInputElement>(null)
const [availableVarInfo, setAvailableVarInfo] = useState<
ReturnType<typeof findAllPreviousVariables>
@ -138,6 +140,7 @@ export function useCalc({
}, [ast, programMemory, selectionRange])
useEffect(() => {
if (!engineCommandManager) return
try {
const code = `const __result__ = ${value}\nshow(__result__)`
const ast = parser_wasm(code)

View File

@ -1,4 +1,5 @@
import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
import { useStore } from '../useStore'
import { v4 as uuidv4 } from 'uuid'
import { EngineCommand } from '../lang/std/engineConnection'
import { useState } from 'react'
@ -6,7 +7,6 @@ import { ActionButton } from '../components/ActionButton'
import { faCheck } from '@fortawesome/free-solid-svg-icons'
import { isReducedMotion } from 'lang/util'
import { AstExplorer } from './AstExplorer'
import { engineCommandManager } from '../lang/std/engineConnection'
type SketchModeCmd = Extract<
Extract<EngineCommand, { type: 'modeling_cmd_req' }>['cmd'],
@ -14,6 +14,9 @@ type SketchModeCmd = Extract<
>
export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
const { engineCommandManager } = useStore((s) => ({
engineCommandManager: s.engineCommandManager,
}))
const [sketchModeCmd, setSketchModeCmd] = useState<SketchModeCmd>({
type: 'default_camera_enable_sketch_mode',
origin: { x: 0, y: 0, z: 0 },
@ -67,18 +70,19 @@ export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
className="w-16"
type="checkbox"
checked={sketchModeCmd.ortho}
onChange={(a) =>
onChange={(a) => {
console.log(a, (a as any).checked)
setSketchModeCmd({
...sketchModeCmd,
ortho: a.target.checked,
})
}
}}
/>
</div>
<ActionButton
Element="button"
onClick={() => {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: sketchModeCmd,
cmd_id: uuidv4(),

View File

@ -1,11 +1,11 @@
import { v4 as uuidv4 } from 'uuid'
import { useStore } from '../useStore'
import { faFileExport, faXmark } from '@fortawesome/free-solid-svg-icons'
import { ActionButton } from './ActionButton'
import Modal from 'react-modal'
import React from 'react'
import { useFormik } from 'formik'
import { Models } from '@kittycad/lib'
import { engineCommandManager } from '../lang/std/engineConnection'
type OutputFormat = Models['OutputFormat_type']
@ -18,6 +18,10 @@ interface ExportButtonProps extends React.PropsWithChildren {
}
export const ExportButton = ({ children, className }: ExportButtonProps) => {
const { engineCommandManager } = useStore((s) => ({
engineCommandManager: s.engineCommandManager,
}))
const [modalIsOpen, setIsOpen] = React.useState(false)
const defaultType = 'gltf'
@ -62,7 +66,7 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
},
}
}
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'export',

View File

@ -25,7 +25,6 @@ import { modify_ast_for_sketch } from '../wasm-lib/pkg/wasm_lib'
import { KCLError } from 'lang/errors'
import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
import { rangeTypeFix } from 'lang/abstractSyntaxTree'
import { engineCommandManager } from '../lang/std/engineConnection'
export const Stream = ({ className = '' }) => {
const [isLoading, setIsLoading] = useState(true)
@ -33,6 +32,7 @@ export const Stream = ({ className = '' }) => {
const videoRef = useRef<HTMLVideoElement>(null)
const {
mediaStream,
engineCommandManager,
setButtonDownInStream,
didDragInStream,
setDidDragInStream,
@ -45,6 +45,7 @@ export const Stream = ({ className = '' }) => {
programMemory,
} = useStore((s) => ({
mediaStream: s.mediaStream,
engineCommandManager: s.engineCommandManager,
setButtonDownInStream: s.setButtonDownInStream,
fileId: s.fileId,
didDragInStream: s.didDragInStream,
@ -72,7 +73,7 @@ export const Stream = ({ className = '' }) => {
if (!videoRef.current) return
if (!mediaStream) return
videoRef.current.srcObject = mediaStream
}, [mediaStream])
}, [mediaStream, engineCommandManager])
const handleMouseDown: MouseEventHandler<HTMLVideoElement> = (e) => {
if (!videoRef.current) return
@ -106,7 +107,7 @@ export const Stream = ({ className = '' }) => {
}
if (guiMode.mode === 'sketch' && guiMode.sketchMode === ('move' as any)) {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'handle_mouse_drag_start',
@ -120,7 +121,7 @@ export const Stream = ({ className = '' }) => {
guiMode.sketchMode === ('sketch_line' as any)
)
) {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'camera_drag_start',
@ -138,7 +139,7 @@ export const Stream = ({ className = '' }) => {
const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
if (!cameraMouseDragGuards[cameraControls].zoom.scrollCallback(e)) return
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'default_camera_zoom',
@ -176,7 +177,7 @@ export const Stream = ({ className = '' }) => {
}
if (!didDragInStream) {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'select_with_point',
@ -213,7 +214,7 @@ export const Stream = ({ className = '' }) => {
window: { x, y },
}
}
engineCommandManager.sendSceneCommand(command).then(async (resp) => {
engineCommandManager?.sendSceneCommand(command).then(async (resp) => {
if (!(guiMode.mode === 'sketch')) return
if (guiMode.sketchMode === 'selectFace') return
@ -239,6 +240,9 @@ export const Stream = ({ className = '' }) => {
) {
// Let's get the updated ast.
if (sketchGroupId === '') return
console.log('guiMode.pathId', guiMode.pathId)
// We have a problem if we do not have an id for the sketch group.
if (
guiMode.pathId === undefined ||
@ -281,7 +285,7 @@ export const Stream = ({ className = '' }) => {
guiMode.waitingFirstClick &&
!isEditingExistingSketch
) {
const curve = await engineCommandManager.sendSceneCommand({
const curve = await engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
@ -322,7 +326,7 @@ export const Stream = ({ className = '' }) => {
resp?.data?.data?.entities_modified?.length &&
(!guiMode.waitingFirstClick || isEditingExistingSketch)
) {
const curve = await engineCommandManager.sendSceneCommand({
const curve = await engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
@ -367,12 +371,12 @@ export const Stream = ({ className = '' }) => {
setGuiMode({
mode: 'default',
})
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: { type: 'edit_mode_exit' },
})
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: { type: 'default_camera_disable_sketch_mode' },

View File

@ -30,7 +30,6 @@ import { isOverlap, roundOff } from 'lib/utils'
import { kclErrToDiagnostic } from 'lang/errors'
import { CSSRuleObject } from 'tailwindcss/types/config'
import interact from '@replit/codemirror-interact'
import { engineCommandManager } from '../lang/std/engineConnection'
export const editorShortcutMeta = {
formatCode: {
@ -53,6 +52,7 @@ export const TextEditor = ({
code,
deferredSetCode,
editorView,
engineCommandManager,
formatCode,
isLSPServerReady,
selectionRanges,
@ -64,6 +64,7 @@ export const TextEditor = ({
code: s.code,
deferredSetCode: s.deferredSetCode,
editorView: s.editorView,
engineCommandManager: s.engineCommandManager,
formatCode: s.formatCode,
isLSPServerReady: s.isLSPServerReady,
selectionRanges: s.selectionRanges,
@ -172,7 +173,7 @@ export const TextEditor = ({
const idBasedSelections = codeBasedSelections
.map(({ type, range }) => {
const hasOverlap = Object.entries(
engineCommandManager.sourceRangeMap || {}
engineCommandManager?.sourceRangeMap || {}
).filter(([_, sourceRange]) => {
return isOverlap(sourceRange, range)
})
@ -185,7 +186,7 @@ export const TextEditor = ({
})
.filter(Boolean) as any
engineCommandManager.cusorsSelected({
engineCommandManager?.cusorsSelected({
otherSelections: [],
idBasedSelections,
})

View File

@ -133,7 +133,7 @@ export const SetAbsDistance = ({ buttonType }: { buttonType: ButtonType }) => {
callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
})
} catch (e) {
console.log('error', e)
console.log('e', e)
}
}}
disabled={!enableAngLen}

View File

@ -147,7 +147,7 @@ export const SetAngleLength = ({
callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
})
} catch (e) {
console.log('erorr', e)
console.log('e', e)
}
}}
disabled={!enableAngLen}

View File

@ -109,6 +109,7 @@ export default class Client extends jsrpc.JSONRPCServerAndClient {
}
}
messageString += message
// console.log(messageString)
return
})

View File

@ -13,7 +13,6 @@ import {
CompletionItemKind,
CompletionTriggerKind,
} from 'vscode-languageserver-protocol'
import debounce from 'debounce-promise'
import type {
Completion,
@ -54,11 +53,14 @@ export class LanguageServerPlugin implements PluginValue {
private languageId: string
private documentVersion: number
private changesTimeout: number
constructor(private view: EditorView, private allowHTMLContent: boolean) {
this.client = this.view.state.facet(client)
this.documentUri = this.view.state.facet(documentUri)
this.languageId = this.view.state.facet(languageId)
this.documentVersion = 0
this.changesTimeout = 0
this.client.attachPlugin(this)
@ -69,10 +71,12 @@ export class LanguageServerPlugin implements PluginValue {
update({ docChanged }: ViewUpdate) {
if (!docChanged) return
this.sendChange({
documentText: this.view.state.doc.toString(),
})
if (this.changesTimeout) clearTimeout(this.changesTimeout)
this.changesTimeout = window.setTimeout(() => {
this.sendChange({
documentText: this.view.state.doc.toString(),
})
}, changesDelay)
}
destroy() {
@ -95,32 +99,14 @@ export class LanguageServerPlugin implements PluginValue {
async sendChange({ documentText }: { documentText: string }) {
if (!this.client.ready) return
if (documentText.length > 5000) {
// Clear out the text it thinks we have, large documents will throw a stack error.
// This is obviously not a good fix but it works for now til we figure
// out the stack limits in wasm and also rewrite the parser.
// Since this is only for hover and completions it will be fine,
// completions will still work for stdlib but hover will not.
// That seems like a fine trade-off for a working editor for the time
// being.
documentText = ''
}
try {
debounce(
() => {
return this.client.textDocumentDidChange({
textDocument: {
uri: this.documentUri,
version: this.documentVersion++,
},
contentChanges: [{ text: documentText }],
})
await this.client.textDocumentDidChange({
textDocument: {
uri: this.documentUri,
version: this.documentVersion++,
},
changesDelay,
{ leading: true }
)
contentChanges: [{ text: documentText }],
})
} catch (e) {
console.error(e)
}
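
The plugin update above replaces the `debounce-promise` wrapper with a hand-rolled timeout: each edit resets `changesTimeout`, so only the last document state inside the delay window reaches the language server. A minimal sketch of that batching, assuming a `changesDelay` constant like the one referenced in the diff (the real plugin calls `client.textDocumentDidChange` rather than a generic `send`):

```typescript
// Timeout-based change batching sketch; changesDelay is an assumption.
const changesDelay = 500

class ChangeBatcher {
  private changesTimeout = 0

  constructor(private send: (text: string) => Promise<void>) {}

  onDocChanged(documentText: string) {
    // Each keystroke cancels the pending notification and re-arms it,
    // so a burst of edits produces a single didChange at the end.
    if (this.changesTimeout) clearTimeout(this.changesTimeout)
    this.changesTimeout = window.setTimeout(() => {
      this.send(documentText).catch(console.error)
    }, changesDelay)
  }
}
```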

View File

@ -8,7 +8,6 @@ import { ArtifactMap, EngineCommandManager } from 'lang/std/engineConnection'
import { Models } from '@kittycad/lib/dist/types/src'
import { isReducedMotion } from 'lang/util'
import { isOverlap } from 'lib/utils'
import { engineCommandManager } from '../lang/std/engineConnection'
interface DefaultPlanes {
xy: string
@ -18,13 +17,19 @@ interface DefaultPlanes {
}
export function useAppMode() {
const { guiMode, setGuiMode, selectionRanges, selectionRangeTypeMap } =
useStore((s) => ({
guiMode: s.guiMode,
setGuiMode: s.setGuiMode,
selectionRanges: s.selectionRanges,
selectionRangeTypeMap: s.selectionRangeTypeMap,
}))
const {
guiMode,
setGuiMode,
selectionRanges,
engineCommandManager,
selectionRangeTypeMap,
} = useStore((s) => ({
guiMode: s.guiMode,
setGuiMode: s.setGuiMode,
selectionRanges: s.selectionRanges,
engineCommandManager: s.engineCommandManager,
selectionRangeTypeMap: s.selectionRangeTypeMap,
}))
const [defaultPlanes, setDefaultPlanes] = useState<DefaultPlanes | null>(null)
useEffect(() => {
if (
@ -60,7 +65,7 @@ export function useAppMode() {
setDefaultPlanesHidden(engineCommandManager, localDefaultPlanes, true)
// TODO figure out the plane to use based on the sketch
// maybe it's easier to make a new plane than rely on the defaults
await engineCommandManager.sendSceneCommand({
await engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
@ -130,7 +135,7 @@ export function useAppMode() {
])
useEffect(() => {
const unSub = engineCommandManager.subscribeTo({
const unSub = engineCommandManager?.subscribeTo({
event: 'select_with_point',
callback: async ({ data }) => {
if (!data.entity_id) return
@ -139,16 +144,18 @@ export function useAppMode() {
// user clicked something else in the scene
return
}
const sketchModeResponse = await engineCommandManager.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'sketch_mode_enable',
plane_id: data.entity_id,
ortho: true,
animated: !isReducedMotion(),
},
})
const sketchModeResponse = await engineCommandManager?.sendSceneCommand(
{
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'sketch_mode_enable',
plane_id: data.entity_id,
ortho: true,
animated: !isReducedMotion(),
},
}
)
setDefaultPlanesHidden(engineCommandManager, defaultPlanes, true)
const sketchUuid = uuidv4()
const proms: any[] = []
@ -171,7 +178,8 @@ export function useAppMode() {
},
})
)
await Promise.all(proms)
const res = await Promise.all(proms)
console.log('res', res)
setGuiMode({
mode: 'sketch',
sketchMode: 'sketchEdit',
@ -201,7 +209,7 @@ async function createPlane(
}
) {
const planeId = uuidv4()
await engineCommandManager.sendSceneCommand({
await engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'make_plane',
@ -213,7 +221,7 @@ async function createPlane(
},
cmd_id: planeId,
})
await engineCommandManager.sendSceneCommand({
await engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd: {
type: 'plane_set_color',
@ -226,12 +234,12 @@ async function createPlane(
}
function setDefaultPlanesHidden(
engineCommandManager: EngineCommandManager,
engineCommandManager: EngineCommandManager | undefined,
defaultPlanes: DefaultPlanes,
hidden: boolean
) {
Object.values(defaultPlanes).forEach((planeId) => {
engineCommandManager.sendSceneCommand({
engineCommandManager?.sendSceneCommand({
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {

View File

@ -1,9 +1,14 @@
import { useEffect } from 'react'
import { useStore } from 'useStore'
import { engineCommandManager } from '../lang/std/engineConnection'
export function useEngineConnectionSubscriptions() {
const { setCursor2, setHighlightRange, highlightRange } = useStore((s) => ({
const {
engineCommandManager,
setCursor2,
setHighlightRange,
highlightRange,
} = useStore((s) => ({
engineCommandManager: s.engineCommandManager,
setCursor2: s.setCursor2,
setHighlightRange: s.setHighlightRange,
highlightRange: s.highlightRange,
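
This hook shows the pattern repeated across the whole diff: `engineCommandManager` is no longer a module-level singleton imported from `lang/std/engineConnection` but an optional value read from the zustand store, which is why every call site now optional-chains (`engineCommandManager?.sendSceneCommand(...)`). A minimal sketch of the store shape, with a hypothetical stand-in for the manager type:

```typescript
import { create } from 'zustand'

// Hypothetical stand-in for EngineCommandManager.
interface Manager {
  sendSceneCommand(cmd: unknown): Promise<unknown>
}

interface State {
  // Optional: undefined until useSetupEngineManager constructs one.
  engineCommandManager?: Manager
  setEngineCommandManager: (m: Manager) => void
}

export const useStore = create<State>()((set) => ({
  engineCommandManager: undefined,
  setEngineCommandManager: (engineCommandManager) =>
    set({ engineCommandManager }),
}))

// Call sites must tolerate the not-yet-connected state:
export function exitEditMode() {
  const { engineCommandManager } = useStore.getState()
  engineCommandManager?.sendSceneCommand({ type: 'edit_mode_exit' })
}
```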

View File

@ -1,90 +1,53 @@
import { useLayoutEffect, useEffect, useRef } from 'react'
import { useLayoutEffect } from 'react'
import { _executor } from '../lang/executor'
import { useStore } from '../useStore'
import { engineCommandManager } from '../lang/std/engineConnection'
import { deferExecution } from 'lib/utils'
import { EngineCommandManager } from '../lang/std/engineConnection'
export function useSetupEngineManager(
streamRef: React.RefObject<HTMLDivElement>,
token?: string
) {
const {
setEngineCommandManager,
setMediaStream,
setIsStreamReady,
setStreamDimensions,
executeCode,
streamDimensions,
} = useStore((s) => ({
setEngineCommandManager: s.setEngineCommandManager,
setMediaStream: s.setMediaStream,
setIsStreamReady: s.setIsStreamReady,
setStreamDimensions: s.setStreamDimensions,
executeCode: s.executeCode,
streamDimensions: s.streamDimensions,
}))
const streamWidth = streamRef?.current?.offsetWidth
const streamHeight = streamRef?.current?.offsetHeight
const hasSetNonZeroDimensions = useRef<boolean>(false)
useLayoutEffect(() => {
// Load the engine command manager once with the initial width and height,
// then we do not want to reload it.
const { width: quadWidth, height: quadHeight } = getDimensions(
streamWidth,
streamHeight
)
if (!hasSetNonZeroDimensions.current && quadHeight && quadWidth) {
engineCommandManager.start({
setMediaStream,
setIsStreamReady,
width: quadWidth,
height: quadHeight,
token,
})
engineCommandManager.waitForReady.then(() => {
executeCode()
})
setStreamDimensions({
streamWidth: quadWidth,
streamHeight: quadHeight,
})
hasSetNonZeroDimensions.current = true
}
}, [streamRef?.current?.offsetWidth, streamRef?.current?.offsetHeight])
useEffect(() => {
const handleResize = deferExecution(() => {
const { width, height } = getDimensions(
streamRef?.current?.offsetWidth,
streamRef?.current?.offsetHeight
)
if (
streamDimensions.streamWidth !== width ||
streamDimensions.streamHeight !== height
) {
engineCommandManager.handleResize({
streamWidth: width,
streamHeight: height,
})
setStreamDimensions({
streamWidth: width,
streamHeight: height,
})
}
}, 500)
window.addEventListener('resize', handleResize)
return () => {
window.removeEventListener('resize', handleResize)
}
}, [])
}
function getDimensions(streamWidth?: number, streamHeight?: number) {
const width = streamWidth ? streamWidth : 0
const quadWidth = Math.round(width / 4) * 4
const height = streamHeight ? streamHeight : 0
const quadHeight = Math.round(height / 4) * 4
return { width: quadWidth, height: quadHeight }
useLayoutEffect(() => {
setStreamDimensions({
streamWidth: quadWidth,
streamHeight: quadHeight,
})
if (!width || !height) return
const eng = new EngineCommandManager({
setMediaStream,
setIsStreamReady,
width: quadWidth,
height: quadHeight,
token,
})
setEngineCommandManager(eng)
eng.waitForReady.then(() => {
executeCode()
})
return () => {
eng?.tearDown()
}
}, [quadWidth, quadHeight])
}
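
The rewritten hook constructs the manager inside `useLayoutEffect` and tears it down on cleanup, re-running only when the stream dimensions change. `quadWidth`/`quadHeight` come from elided context, but per the removed `getDimensions` helper they are the stream size snapped to a multiple of 4, a common requirement for video encoders; the `if (!width || !height) return` guard skips construction while the ref has no size. A condensed sketch of that rounding, assuming the same semantics:

```typescript
// Condensed restatement of the removed getDimensions: snap each axis to
// the nearest multiple of 4, treating a missing measurement as 0.
function getQuadDimensions(streamWidth?: number, streamHeight?: number) {
  const quad = (n?: number) => Math.round((n ?? 0) / 4) * 4
  return { quadWidth: quad(streamWidth), quadHeight: quad(streamHeight) }
}

// e.g. getQuadDimensions(1283, 721) -> { quadWidth: 1284, quadHeight: 720 }
```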

View File

@ -48,7 +48,7 @@ export function useConvertToVariable() {
updateAst(_modifiedAst, true)
} catch (e) {
console.log('error', e)
console.log('e', e)
}
}

View File

@ -1691,6 +1691,7 @@ describe('parsing errors', () => {
let _theError
try {
const result = expect(parser_wasm(code))
console.log('result', result)
} catch (e) {
_theError = e
}

View File

@ -7,7 +7,7 @@ export const recast = (ast: Program): string => {
return s
} catch (e) {
// TODO: do something real with the error.
console.log('recast error', e)
console.log('recast', e)
throw e
}
}

View File

@ -595,12 +595,7 @@ export class EngineCommandManager {
[localUnsubscribeId: string]: (a: any) => void
}
} = {} as any
constructor() {
this.engineConnection = undefined
}
start({
constructor({
setMediaStream,
setIsStreamReady,
width,
@ -613,16 +608,6 @@ export class EngineCommandManager {
height: number
token?: string
}) {
if (width === 0 || height === 0) {
return
}
// If we already have an engine connection, just need to resize the stream.
if (this.engineConnection) {
this.handleResize({ streamWidth: width, streamHeight: height })
return
}
this.waitForReady = new Promise((resolve) => {
this.resolveReady = resolve
})
@ -704,35 +689,7 @@ export class EngineCommandManager {
this.engineConnection?.connect()
}
handleResize({
streamWidth,
streamHeight,
}: {
streamWidth: number
streamHeight: number
}) {
console.log('handleResize', streamWidth, streamHeight)
if (!this.engineConnection?.isReady()) {
return
}
const resizeCmd: EngineCommand = {
type: 'modeling_cmd_req',
cmd_id: uuidv4(),
cmd: {
type: 'reconfigure_stream',
width: streamWidth,
height: streamHeight,
fps: 60,
},
}
this.engineConnection?.send(resizeCmd)
}
handleModelingCommand(message: WebSocketResponse, id: string) {
if (this.engineConnection === undefined) {
return
}
if (message.type !== 'modeling') {
return
}
@ -897,9 +854,6 @@ export class EngineCommandManager {
})
}
sendSceneCommand(command: EngineCommand): Promise<any> {
if (this.engineConnection === undefined) {
return Promise.resolve()
}
if (
command.type === 'modeling_cmd_req' &&
command.cmd.type !== lastMessage
@ -951,9 +905,6 @@ export class EngineCommandManager {
range: SourceRange
command: EngineCommand | string
}): Promise<any> {
if (this.engineConnection === undefined) {
return Promise.resolve()
}
this.sourceRangeMap[id] = range
if (!this.engineConnection?.isReady()) {
@ -999,9 +950,6 @@ export class EngineCommandManager {
rangeStr: string,
commandStr: string
): Promise<any> {
if (this.engineConnection === undefined) {
return Promise.resolve()
}
if (id === undefined) {
throw new Error('id is undefined')
}
@ -1052,9 +1000,6 @@ export class EngineCommandManager {
}
}
private async fixIdMappings(ast: Program, programMemory: ProgramMemory) {
if (this.engineConnection === undefined) {
return
}
/* This is a temporary solution since the cmd_ids that are sent through when
sending 'extend_path' ids are not used as the segment ids.
@ -1134,5 +1079,3 @@ export class EngineCommandManager {
})
}
}
export const engineCommandManager = new EngineCommandManager()
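
With `start()` folded into the constructor and the module-level `engineCommandManager` export removed, consumers now build their own instance and await readiness before opening a session, as the executor change later in this diff does. A hedged sketch of that lifecycle (option names mirror the constructor above; callers are expected to pass non-zero dimensions, since the hook returns early when the ref has no size yet):

```typescript
import { EngineCommandManager } from './lang/std/engineConnection'

// Per-session construction replacing the old singleton + start() flow.
async function connect(token?: string) {
  const manager = new EngineCommandManager({
    setMediaStream: () => {}, // no-op sinks for a headless session
    setIsStreamReady: () => {},
    width: 1280,
    height: 720,
    token,
  })
  await manager.waitForReady // resolves once the connection is up
  manager.startNewSession()
  return manager
}
```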

View File

@ -1279,7 +1279,7 @@ export function getTransformInfos(
}) as TransformInfo[]
return theTransforms
} catch (error) {
console.log('error', error)
console.log(error)
return []
}
}

View File

@ -11,7 +11,7 @@ export async function asyncLexer(str: string): Promise<Token[]> {
return tokens
} catch (e) {
// TODO: do something real with the error.
console.log('lexer error', e)
console.log('lexer', e)
throw e
}
}
@ -22,7 +22,7 @@ export function lexer(str: string): Token[] {
return tokens
} catch (e) {
// TODO: do something real with the error.
console.log('lexer error', e)
console.log('lexer', e)
throw e
}
}

View File

@ -39,6 +39,6 @@ export async function exportSave(data: ArrayBuffer) {
}
} catch (e) {
// TODO: do something real with the error.
console.log('export error', e)
console.log('export', e)
}
}

View File

@ -36,7 +36,7 @@ export async function initializeProjectDirectory(directory: string) {
try {
docDirectory = await documentDir()
} catch (e) {
console.log('error', e)
console.log(e)
docDirectory = await homeDir() // seems to work better on Linux
}

View File

@ -75,12 +75,11 @@ export async function executor(
ast: Program,
pm: ProgramMemory = { root: {}, return: null }
): Promise<ProgramMemory> {
const engineCommandManager = new EngineCommandManager()
engineCommandManager.start({
const engineCommandManager = new EngineCommandManager({
setIsStreamReady: () => {},
setMediaStream: () => {},
width: 0,
height: 0,
width: 100,
height: 100,
})
await engineCommandManager.waitForReady
engineCommandManager.startNewSession()

View File

@ -19,7 +19,6 @@ import { KCLError } from './lang/errors'
import { deferExecution } from 'lib/utils'
import { _executor } from './lang/executor'
import { bracket } from 'lib/exampleKcl'
import { engineCommandManager } from './lang/std/engineConnection'
export type Selection = {
type: 'default' | 'line-end' | 'line-mid'
@ -163,6 +162,8 @@ export interface StoreState {
setProgramMemory: (programMemory: ProgramMemory) => void
isShiftDown: boolean
setIsShiftDown: (isShiftDown: boolean) => void
engineCommandManager?: EngineCommandManager
setEngineCommandManager: (engineCommandManager: EngineCommandManager) => void
mediaStream?: MediaStream
setMediaStream: (mediaStream: MediaStream) => void
isStreamReady: boolean
@ -225,7 +226,7 @@ export const useStore = create<StoreState>()(
const result = await executeCode({
code: code || get().code,
lastAst: get().ast,
engineCommandManager: engineCommandManager,
engineCommandManager: get().engineCommandManager,
})
if (!result.isChange) {
return
@ -331,6 +332,8 @@ export const useStore = create<StoreState>()(
executeAst: async (ast) => {
const _ast = ast || get().ast
if (!get().isStreamReady) return
const engineCommandManager = get().engineCommandManager!
if (!engineCommandManager) return
set({ isExecuting: true })
const { logs, errors, programMemory } = await executeAst({
@ -347,6 +350,8 @@ export const useStore = create<StoreState>()(
executeAstMock: async (ast) => {
const _ast = ast || get().ast
if (!get().isStreamReady) return
const engineCommandManager = get().engineCommandManager!
if (!engineCommandManager) return
const { logs, errors, programMemory } = await executeAst({
ast: _ast,
@ -430,6 +435,8 @@ export const useStore = create<StoreState>()(
setProgramMemory: (programMemory) => set({ programMemory }),
isShiftDown: false,
setIsShiftDown: (isShiftDown) => set({ isShiftDown }),
setEngineCommandManager: (engineCommandManager) =>
set({ engineCommandManager }),
setMediaStream: (mediaStream) => set({ mediaStream }),
isStreamReady: false,
setIsStreamReady: (isStreamReady) => set({ isStreamReady }),
@ -447,9 +454,7 @@ export const useStore = create<StoreState>()(
fileId: '',
setFileId: (fileId) => set({ fileId }),
streamDimensions: { streamWidth: 1280, streamHeight: 720 },
setStreamDimensions: (streamDimensions) => {
set({ streamDimensions })
},
setStreamDimensions: (streamDimensions) => set({ streamDimensions }),
isExecuting: false,
setIsExecuting: (isExecuting) => set({ isExecuting }),
@ -514,7 +519,7 @@ async function executeCode({
}: {
code: string
lastAst: Program
engineCommandManager: EngineCommandManager
engineCommandManager?: EngineCommandManager
}): Promise<
| {
logs: string[]
@ -534,7 +539,7 @@ async function executeCode({
if (e instanceof KCLError) {
errors = [e]
logs = []
if (e.msg === 'file is empty') engineCommandManager.endSession()
if (e.msg === 'file is empty') engineCommandManager?.endSession()
}
return {
isChange: true,
@ -557,7 +562,7 @@ async function executeCode({
}
// Check if the ast we have is equal to the ast in the storage.
// If it is, we don't need to update the ast.
if (JSON.stringify(ast) === JSON.stringify(lastAst))
if (!engineCommandManager || JSON.stringify(ast) === JSON.stringify(lastAst))
return { isChange: false }
const { logs, errors, programMemory } = await executeAst({

View File

@ -226,6 +226,17 @@ version = "0.21.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2"
[[package]]
name = "bigdecimal"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "bigdecimal"
version = "0.4.1"
@ -1394,6 +1405,7 @@ dependencies = [
"lazy_static",
"parse-display",
"pretty_assertions",
"regex",
"reqwest",
"schemars",
"serde",
@ -1407,7 +1419,6 @@ dependencies = [
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"winnow",
]
[[package]]
@ -1419,7 +1430,7 @@ dependencies = [
"anyhow",
"async-trait",
"base64 0.21.4",
"bigdecimal",
"bigdecimal 0.4.1",
"bytes",
"chrono",
"data-encoding",
@ -1719,7 +1730,7 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
[[package]]
name = "openapitor"
version = "0.0.9"
source = "git+https://github.com/KittyCAD/kittycad.rs?branch=main#61a16059b3eaf8793a2a2e1edbc0d770f284fea3"
source = "git+https://github.com/KittyCAD/kittycad.rs?branch=main#0d121f6881da91b4a30bee18bbfe50e4a2096073"
dependencies = [
"Inflector",
"anyhow",
@ -2452,7 +2463,8 @@ version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c"
dependencies = [
"bigdecimal",
"bigdecimal 0.3.1",
"bigdecimal 0.4.1",
"bytes",
"chrono",
"dyn-clone",
@ -3081,9 +3093,9 @@ dependencies = [
[[package]]
name = "tokio-tungstenite"
version = "0.20.1"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c"
checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2"
dependencies = [
"futures-util",
"log",
@ -3303,9 +3315,9 @@ dependencies = [
[[package]]
name = "tungstenite"
version = "0.20.1"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
dependencies = [
"byteorder",
"bytes",
@ -3792,15 +3804,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "winnow"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.50.0"

View File

@ -18,13 +18,13 @@ derive-docs = { path = "../derive-docs" }
kittycad = { version = "0.2.25", default-features = false, features = ["js"] }
lazy_static = "1.4.0"
parse-display = "0.8.2"
regex = "1.7.1"
schemars = { version = "0.8", features = ["impl_json_schema", "url", "uuid1"] }
serde = { version = "1.0.188", features = ["derive"] }
serde_json = "1.0.107"
thiserror = "1.0.48"
ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "schemars-impl", "uuid-impl"] }
uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
winnow = "0.5.15"
[target.'cfg(target_arch = "wasm32")'.dependencies]
js-sys = { version = "0.3.64" }

View File

@ -1,32 +1,24 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use criterion::{criterion_group, criterion_main, Criterion};
pub fn bench_lex(c: &mut Criterion) {
c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM)));
c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM)));
c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
}
pub fn bench_lex_parse(c: &mut Criterion) {
pub fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("parse_lex_cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
c.bench_function("parse_lex_big_kitt", |b| b.iter(|| lex_and_parse(KITT_PROGRAM)));
c.bench_function("parse_lex_pipes_on_pipes", |b| b.iter(|| lex_and_parse(PIPES_PROGRAM)));
}
fn lex(program: &str) {
black_box(kcl_lib::token::lexer(program));
c.bench_function("parse_lex_big kitt", |b| {
b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/kittycad_svg.kcl")))
});
c.bench_function("parse_lex_pipes_on_pipes", |b| {
b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl")))
});
}
fn lex_and_parse(program: &str) {
let tokens = kcl_lib::token::lexer(program);
let tokens = kcl_lib::tokeniser::lexer(program);
let parser = kcl_lib::parser::Parser::new(tokens);
black_box(parser.ast().unwrap());
parser.ast().unwrap();
}
criterion_group!(benches, bench_lex, bench_lex_parse);
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => {
const sg = startSketchAt(pos)
|> line([0, scale], %)

View File

@ -709,6 +709,7 @@ dependencies = [
"kittycad",
"lazy_static",
"parse-display",
"regex",
"reqwest",
"schemars",
"serde",
@ -722,7 +723,6 @@ dependencies = [
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"winnow",
]
[[package]]
@ -1878,9 +1878,9 @@ dependencies = [
[[package]]
name = "tungstenite"
version = "0.20.1"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
dependencies = [
"byteorder",
"bytes",
@ -2158,15 +2158,6 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "winnow"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.50.0"

View File

@ -166,7 +166,7 @@ pub async fn modify_ast_for_sketch(
let recasted = program.recast(&FormatOptions::default(), 0);
// Re-parse the ast so we get the correct source ranges.
let tokens = crate::token::lexer(&recasted);
let tokens = crate::tokeniser::lexer(&recasted);
let parser = crate::parser::Parser::new(tokens);
*program = parser.ast()?;

View File

@ -2691,7 +2691,7 @@ fn ghi = (x) => {
}
show(part001)"#;
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let symbols = program.get_lsp_symbols(code);
@ -2719,7 +2719,7 @@ show(part001)
let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
|> line([0.4900857016, -0.0240763666], %)
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2738,7 +2738,7 @@ show(part001)
let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
|> line([0.4900857016, -0.0240763666], %) // hello world
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2757,7 +2757,7 @@ show(part001)
|> line([0.4900857016, -0.0240763666], %)
// hello world
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2783,7 +2783,7 @@ show(part001)
// this is also a comment
return things
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2820,7 +2820,7 @@ const mySk1 = startSketchAt([0, 0])
|> ry(45, %)
|> rx(45, %)
// one more for good measure"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2859,7 +2859,7 @@ a comment between pipe expression statements */
|> line([-0.42, -1.72], %)
show(part001)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2885,7 +2885,7 @@ const yo = [
" hey oooooo really long long long"
]
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2903,7 +2903,7 @@ const key = 'c'
const things = "things"
// this is also a comment"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2921,7 +2921,7 @@ const things = "things"
// a comment
"
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2946,7 +2946,7 @@ const part001 = startSketchAt([0, 0])
-angleToMatchLengthY('seg01', myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -2972,7 +2972,7 @@ const part001 = startSketchAt([0, 0])
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3003,7 +3003,7 @@ fn ghi = (part001) => {
}
show(part001)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
program.rename_symbol("mySuperCoolPart", 6);
@ -3034,7 +3034,7 @@ show(mySuperCoolPart)
let some_program_string = r#"fn ghi = (x, y, z) => {
return x
}"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
program.rename_symbol("newName", 10);
@ -3063,7 +3063,7 @@ const firstExtrude = startSketchAt([0,0])
|> extrude(h, %)
show(firstExtrude)"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3089,7 +3089,7 @@ show(firstExtrude)
#[tokio::test(flavor = "multi_thread")]
async fn test_recast_math_start_negative() {
let some_program_string = r#"const myVar = -5 + 6"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
@ -3105,7 +3105,7 @@ const FOS = 2
const sigmaAllow = 8
const width = 20
const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
let tokens = crate::token::lexer(some_program_string);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();

View File

@ -620,22 +620,6 @@ pub async fn execute(
let result = call_expr.execute(memory, &mut pipe_info, engine).await?;
args.push(result);
}
Value::BinaryExpression(binary_expression) => {
let result = binary_expression.get_result(memory, &mut pipe_info, engine).await?;
args.push(result);
}
Value::UnaryExpression(unary_expression) => {
let result = unary_expression.get_result(memory, &mut pipe_info, engine).await?;
args.push(result);
}
Value::ObjectExpression(object_expression) => {
let result = object_expression.execute(memory, &mut pipe_info, engine).await?;
args.push(result);
}
Value::ArrayExpression(array_expression) => {
let result = array_expression.execute(memory, &mut pipe_info, engine).await?;
args.push(result);
}
// We do nothing for the rest.
_ => (),
}
@ -695,7 +679,7 @@ pub async fn execute(
message: format!(
"Expected {} arguments, got {}",
function_expression.params.len(),
args.len(),
args.len()
),
source_ranges: vec![(&function_expression).into()],
}));
@ -820,7 +804,7 @@ mod tests {
use super::*;
pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast()?;
let mut mem: ProgramMemory = Default::default();

View File

@ -9,4 +9,4 @@ pub mod math_parser;
pub mod parser;
pub mod server;
pub mod std;
pub mod token;
pub mod tokeniser;

View File

@ -10,8 +10,8 @@ use crate::{
},
errors::{KclError, KclErrorDetails},
executor::SourceRange,
parser::Parser,
token::{Token, TokenType},
parser::{is_not_code_token, Parser},
tokeniser::{Token, TokenType},
};
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
@ -334,7 +334,7 @@ impl ReversePolishNotation {
return rpn.parse();
}
if !current_token.is_code_token() {
if is_not_code_token(current_token) {
let rpn = ReversePolishNotation::new(&self.parser.tokens[1..], &self.previous_postfix, &self.operators);
return rpn.parse();
}
@ -704,7 +704,7 @@ mod test {
#[test]
fn test_parse_expression() {
let tokens = crate::token::lexer("1 + 2");
let tokens = crate::tokeniser::lexer("1 + 2");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -731,7 +731,7 @@ mod test {
#[test]
fn test_parse_expression_add_no_spaces() {
let tokens = crate::token::lexer("1+2");
let tokens = crate::tokeniser::lexer("1+2");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -758,7 +758,7 @@ mod test {
#[test]
fn test_parse_expression_sub_no_spaces() {
let tokens = crate::token::lexer("1 -2");
let tokens = crate::tokeniser::lexer("1 -2");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -785,7 +785,7 @@ mod test {
#[test]
fn test_parse_expression_plus_followed_by_star() {
let tokens = crate::token::lexer("1 + 2 * 3");
let tokens = crate::tokeniser::lexer("1 + 2 * 3");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -823,7 +823,7 @@ mod test {
#[test]
fn test_parse_expression_with_parentheses() {
let tokens = crate::token::lexer("1 * ( 2 + 3 )");
let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 )");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -861,7 +861,7 @@ mod test {
#[test]
fn test_parse_expression_parens_in_middle() {
let tokens = crate::token::lexer("1 * ( 2 + 3 ) / 4");
let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 ) / 4");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -910,7 +910,7 @@ mod test {
#[test]
fn test_parse_expression_parans_and_predence() {
let tokens = crate::token::lexer("1 + ( 2 + 3 ) / 4");
let tokens = crate::tokeniser::lexer("1 + ( 2 + 3 ) / 4");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -958,7 +958,7 @@ mod test {
}
#[test]
fn test_parse_expression_nested() {
let tokens = crate::token::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
let tokens = crate::tokeniser::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -1017,7 +1017,7 @@ mod test {
}
#[test]
fn test_parse_expression_redundant_braces() {
let tokens = crate::token::lexer("1 * ((( 2 + 3 )))");
let tokens = crate::tokeniser::lexer("1 * ((( 2 + 3 )))");
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -1055,7 +1055,7 @@ mod test {
#[test]
fn test_reverse_polish_notation_simple() {
let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2"), &[], &[]);
let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2"), &[], &[]);
let result = parser.parse().unwrap();
assert_eq!(
result,
@ -1084,7 +1084,7 @@ mod test {
#[test]
fn test_reverse_polish_notation_complex() {
let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2 * 3"), &[], &[]);
let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2 * 3"), &[], &[]);
let result = parser.parse().unwrap();
assert_eq!(
result,
@ -1125,7 +1125,7 @@ mod test {
#[test]
fn test_reverse_polish_notation_complex_with_parentheses() {
let parser = ReversePolishNotation::new(&crate::token::lexer("1 * ( 2 + 3 )"), &[], &[]);
let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 * ( 2 + 3 )"), &[], &[]);
let result = parser.parse().unwrap();
assert_eq!(
result,
@ -1179,7 +1179,7 @@ mod test {
#[test]
fn test_parse_expression_redundant_braces_around_literal() {
let code = "2 + (((3)))";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let mut parser = MathParser::new(&tokens);
let result = parser.parse().unwrap();
assert_eq!(
@ -1274,7 +1274,7 @@ mod test {
#[test]
fn test_parse_expression_braces_around_lots_of_math() {
let code = "(distance * p * FOS * 6 / (sigmaAllow * width))";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let mut parser = MathParser::new(&tokens);
let result = parser.parse();
assert!(result.is_ok());
@ -1283,7 +1283,7 @@ mod test {
#[test]
fn test_parse_expression_braces_around_internals_lots_of_math() {
let code = "distance * p * FOS * 6 / (sigmaAllow * width)";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let mut parser = MathParser::new(&tokens);
let result = parser.parse();
assert!(result.is_ok());

View File

@ -10,7 +10,7 @@ use crate::{
},
errors::{KclError, KclErrorDetails},
math_parser::MathParser,
token::{Token, TokenType},
tokeniser::{Token, TokenType},
};
pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
@ -249,7 +249,7 @@ impl Parser {
}
let current_token = self.get_token(index)?;
if !current_token.is_code_token() {
if is_not_code_token(current_token) {
return self.find_end_of_non_code_node(index + 1);
}
@ -262,7 +262,7 @@ impl Parser {
}
let current_token = self.get_token(index)?;
if !current_token.is_code_token() {
if is_not_code_token(current_token) {
return self.find_start_of_non_code_node(index - 1);
}
@ -365,7 +365,7 @@ impl Parser {
});
};
if !token.is_code_token() {
if is_not_code_token(token) {
let non_code_node = self.make_non_code_node(new_index)?;
let new_new_index = non_code_node.1 + 1;
let bonus_non_code_node = non_code_node.0;
@ -1623,7 +1623,7 @@ impl Parser {
});
}
if !token.is_code_token() {
if is_not_code_token(token) {
let next_token = self.next_meaningful_token(token_index, Some(0))?;
if let Some(node) = &next_token.non_code_node {
if previous_body.is_empty() {
@ -1788,6 +1788,12 @@ impl Parser {
}
}
pub fn is_not_code_token(token: &Token) -> bool {
token.token_type == TokenType::Whitespace
|| token.token_type == TokenType::LineComment
|| token.token_type == TokenType::BlockComment
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
@ -1797,7 +1803,7 @@ mod tests {
#[test]
fn test_make_identifier() {
let tokens = crate::token::lexer("a");
let tokens = crate::tokeniser::lexer("a");
let parser = Parser::new(tokens);
let identifier = parser.make_identifier(0).unwrap();
assert_eq!(
@ -1812,7 +1818,7 @@ mod tests {
#[test]
fn test_make_identifier_with_const_myvar_equals_5_and_index_2() {
let tokens = crate::token::lexer("const myVar = 5");
let tokens = crate::tokeniser::lexer("const myVar = 5");
let parser = Parser::new(tokens);
let identifier = parser.make_identifier(2).unwrap();
assert_eq!(
@ -1827,7 +1833,7 @@ mod tests {
#[test]
fn test_make_identifier_multiline() {
let tokens = crate::token::lexer("const myVar = 5\nconst newVar = myVar + 1");
let tokens = crate::tokeniser::lexer("const myVar = 5\nconst newVar = myVar + 1");
let parser = Parser::new(tokens);
let identifier = parser.make_identifier(2).unwrap();
assert_eq!(
@ -1851,7 +1857,7 @@ mod tests {
#[test]
fn test_make_identifier_call_expression() {
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
let parser = Parser::new(tokens);
let identifier = parser.make_identifier(0).unwrap();
assert_eq!(
@ -1874,7 +1880,7 @@ mod tests {
}
#[test]
fn test_make_non_code_node() {
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
let parser = Parser::new(tokens);
let index = 4;
let expected_output = (None, 4);
@ -1883,7 +1889,7 @@ mod tests {
let index = 7;
let expected_output = (None, 7);
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"
const yo = { a: { b: { c: '123' } } }
// this is a comment
@ -1914,7 +1920,7 @@ const key = 'c'"#,
31,
);
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const mySketch = startSketchAt([0,0])
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|> lineTo([1, 1], %) /* this is
@ -1940,7 +1946,7 @@ const key = 'c'"#,
#[test]
fn test_collect_object_keys() {
let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
let parser = Parser::new(tokens);
let keys_info = parser.collect_object_keys(6, None, false).unwrap();
assert_eq!(keys_info.len(), 2);
@ -1960,7 +1966,7 @@ const key = 'c'"#,
#[test]
fn test_make_literal_call_expression() {
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
let parser = Parser::new(tokens);
let literal = parser.make_literal(2).unwrap();
assert_eq!(
@ -1984,88 +1990,74 @@ const key = 'c'"#,
);
}
#[test]
fn test_is_code_token() {
let tokens = [
Token {
token_type: TokenType::Word,
start: 0,
end: 3,
value: "log".to_string(),
},
Token {
token_type: TokenType::Brace,
start: 3,
end: 4,
value: "(".to_string(),
},
Token {
token_type: TokenType::Number,
start: 4,
end: 5,
value: "5".to_string(),
},
Token {
token_type: TokenType::Comma,
start: 5,
end: 6,
value: ",".to_string(),
},
Token {
token_type: TokenType::String,
start: 7,
end: 14,
value: "\"hello\"".to_string(),
},
Token {
token_type: TokenType::Word,
start: 16,
end: 27,
value: "aIdentifier".to_string(),
},
Token {
token_type: TokenType::Brace,
start: 27,
end: 28,
value: ")".to_string(),
},
];
for (i, token) in tokens.iter().enumerate() {
assert!(token.is_code_token(), "failed test {i}: {token:?}")
}
}
#[test]
fn test_is_not_code_token() {
let tokens = [
Token {
token_type: TokenType::Whitespace,
start: 6,
end: 7,
value: " ".to_string(),
},
Token {
token_type: TokenType::BlockComment,
start: 28,
end: 30,
value: "/* abte */".to_string(),
},
Token {
token_type: TokenType::LineComment,
start: 30,
end: 33,
value: "// yoyo a line".to_string(),
},
];
for (i, token) in tokens.iter().enumerate() {
assert!(!token.is_code_token(), "failed test {i}: {token:?}")
}
assert!(!is_not_code_token(&Token {
token_type: TokenType::Word,
start: 0,
end: 3,
value: "log".to_string(),
}));
assert!(!is_not_code_token(&Token {
token_type: TokenType::Brace,
start: 3,
end: 4,
value: "(".to_string(),
}));
assert!(!is_not_code_token(&Token {
token_type: TokenType::Number,
start: 4,
end: 5,
value: "5".to_string(),
}));
assert!(!is_not_code_token(&Token {
token_type: TokenType::Comma,
start: 5,
end: 6,
value: ",".to_string(),
}));
assert!(is_not_code_token(&Token {
token_type: TokenType::Whitespace,
start: 6,
end: 7,
value: " ".to_string(),
}));
assert!(!is_not_code_token(&Token {
token_type: TokenType::String,
start: 7,
end: 14,
value: "\"hello\"".to_string(),
}));
assert!(!is_not_code_token(&Token {
token_type: TokenType::Word,
start: 16,
end: 27,
value: "aIdentifier".to_string(),
}));
assert!(!is_not_code_token(&Token {
token_type: TokenType::Brace,
start: 27,
end: 28,
value: ")".to_string(),
}));
assert!(is_not_code_token(&Token {
token_type: TokenType::BlockComment,
start: 28,
end: 30,
value: "/* abte */".to_string(),
}));
assert!(is_not_code_token(&Token {
token_type: TokenType::LineComment,
start: 30,
end: 33,
value: "// yoyo a line".to_string(),
}));
}
#[test]
fn test_next_meaningful_token() {
let _offset = 1;
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const mySketch = startSketchAt([0,0])
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|> lineTo([1, 1], %) /* this is
@ -2451,7 +2443,7 @@ const key = 'c'"#,
#[test]
fn test_find_closing_brace() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const mySketch = startSketchAt([0,0])
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|> lineTo([1, 1], %) /* this is
@ -2468,16 +2460,16 @@ const key = 'c'"#,
assert_eq!(parser.find_closing_brace(90, 0, "").unwrap(), 92);
let basic = "( hey )";
let parser = Parser::new(crate::token::lexer(basic));
let parser = Parser::new(crate::tokeniser::lexer(basic));
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 4);
let handles_non_zero_index = "(indexForBracketToRightOfThisIsTwo(shouldBeFour)AndNotThisSix)";
let parser = Parser::new(crate::token::lexer(handles_non_zero_index));
let parser = Parser::new(crate::tokeniser::lexer(handles_non_zero_index));
assert_eq!(parser.find_closing_brace(2, 0, "").unwrap(), 4);
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 6);
let handles_nested = "{a{b{c(}d]}eathou athoeu tah u} thatOneToTheLeftIsLast }";
let parser = Parser::new(crate::token::lexer(handles_nested));
let parser = Parser::new(crate::tokeniser::lexer(handles_nested));
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 18);
// TODO expect error when not started on a brace
@ -2485,7 +2477,7 @@ const key = 'c'"#,
#[test]
fn test_is_call_expression() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const mySketch = startSketchAt([0,0])
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|> lineTo([1, 1], %) /* this is
@ -2506,7 +2498,7 @@ const key = 'c'"#,
#[test]
fn test_find_next_declaration_keyword() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const mySketch = startSketchAt([0,0])
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|> lineTo([1, 1], %) /* this is
@ -2521,7 +2513,7 @@ const key = 'c'"#,
TokenReturn { token: None, index: 92 }
);
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const myVar = 5
const newVar = myVar + 1
"#,
@ -2551,7 +2543,7 @@ const newVar = myVar + 1
lineTo(2, 3)
} |> rx(45, %)
"#;
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens);
assert_eq!(
parser.has_pipe_operator(0, None).unwrap(),
@ -2570,7 +2562,7 @@ const newVar = myVar + 1
lineTo(2, 3)
} |> rx(45, %) |> rx(45, %)
"#;
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens);
assert_eq!(
parser.has_pipe_operator(0, None).unwrap(),
@ -2592,7 +2584,7 @@ const newVar = myVar + 1
const yo = myFunc(9()
|> rx(45, %)
"#;
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens);
assert_eq!(
parser.has_pipe_operator(0, None).unwrap(),
@ -2604,7 +2596,7 @@ const yo = myFunc(9()
);
let code = "const myVar2 = 5 + 1 |> myFn(%)";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens);
assert_eq!(
parser.has_pipe_operator(1, None).unwrap(),
@ -2626,7 +2618,7 @@ const yo = myFunc(9()
lineTo(1,1)
} |> rx(90, %)
show(mySk1)"#;
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let token_with_my_path_index = tokens.iter().position(|token| token.value == "myPath").unwrap();
// loop through getting the token and it's index
@ -2666,7 +2658,7 @@ show(mySk1)"#;
#[test]
fn test_make_member_expression() {
let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
let parser = Parser::new(tokens);
let member_expression_return = parser.make_member_expression(6).unwrap();
let member_expression = member_expression_return.expression;
@ -2708,63 +2700,63 @@ show(mySk1)"#;
#[test]
fn test_find_end_of_binary_expression() {
let code = "1 + 2 * 3\nconst yo = 5";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].value, "3");
let code = "(1 + 25) / 5 - 3\nconst yo = 5";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].value, "3");
let index_of_5 = code.find('5').unwrap();
let end_starting_at_the_5 = parser.find_end_of_binary_expression(index_of_5).unwrap();
assert_eq!(end_starting_at_the_5, end);
// whole thing wrapped
// whole thing wraped
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].end, code.find("3)").unwrap() + 2);
// whole thing wrapped but given index after the first brace
// whole thing wraped but given index after the first brace
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(1).unwrap();
assert_eq!(tokens[end].value, "3");
// given the index of a small wrapped section i.e. `1 + 2` in ((1 + 2) / 5 - 3)'
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(2).unwrap();
assert_eq!(tokens[end].value, "2");
// lots of silly nesting
let code = "(1 + 2) / (5 - (3))\nconst yo = 5";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
// with pipe operator at the end
let code = "(1 + 2) / (5 - (3))\n |> fn(%)";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
// with call expression at the start of binary expression
let code = "yo(2) + 3\n |> fn(%)";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].value, "3");
// with call expression at the end of binary expression
let code = "3 + yo(2)\n |> fn(%)";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens);
let _end = parser.find_end_of_binary_expression(0).unwrap();
// with call expression at the end of binary expression
let code = "-legX + 2, ";
let tokens = crate::token::lexer(code);
let tokens = crate::tokeniser::lexer(code);
let parser = Parser::new(tokens.clone());
let end = parser.find_end_of_binary_expression(0).unwrap();
assert_eq!(tokens[end].value, "2");
@ -2773,7 +2765,7 @@ show(mySk1)"#;
#[test]
fn test_make_array_expression() {
// input_index: 6, output_index: 14, output: {"type":"ArrayExpression","start":11,"end":26,"elements":[{"type":"Literal","start":12,"end":15,"value":"1","raw":"\"1\""},{"type":"Literal","start":17,"end":18,"value":2,"raw":"2"},{"type":"Identifier","start":20,"end":25,"name":"three"}]}
let tokens = crate::token::lexer("const yo = [\"1\", 2, three]");
let tokens = crate::tokeniser::lexer("const yo = [\"1\", 2, three]");
let parser = Parser::new(tokens);
let array_expression = parser.make_array_expression(6).unwrap();
let expression = array_expression.expression;
@ -2812,7 +2804,7 @@ show(mySk1)"#;
#[test]
fn test_make_call_expression() {
let tokens = crate::token::lexer("foo(\"a\", a, 3)");
let tokens = crate::tokeniser::lexer("foo(\"a\", a, 3)");
let parser = Parser::new(tokens);
let result = parser.make_call_expression(0).unwrap();
assert_eq!(result.last_index, 9);
@ -2846,7 +2838,7 @@ show(mySk1)"#;
#[test]
fn test_make_variable_declaration() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const yo = startSketch([0, 0])
|> lineTo([1, myVar], %)
|> foo(myVar2, %)
@ -2916,7 +2908,7 @@ show(mySk1)"#;
#[test]
fn test_make_body() {
let tokens = crate::token::lexer("const myVar = 5");
let tokens = crate::tokeniser::lexer("const myVar = 5");
let parser = Parser::new(tokens);
let body = parser
.make_body(
@ -2934,7 +2926,7 @@ show(mySk1)"#;
#[test]
fn test_abstract_syntax_tree() {
let code = "5 +6";
let parser = Parser::new(crate::token::lexer(code));
let parser = Parser::new(crate::tokeniser::lexer(code));
let result = parser.ast().unwrap();
let expected_result = Program {
start: 0,
@ -2972,8 +2964,8 @@ show(mySk1)"#;
#[test]
fn test_empty_file() {
let some_program_string = r#""#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert!(result.err().unwrap().to_string().contains("file is empty"));
@ -2981,7 +2973,7 @@ show(mySk1)"#;
#[test]
fn test_parse_half_pipe_small() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
"const secondExtrude = startSketchAt([0,0])
|",
);
@ -2993,14 +2985,14 @@ show(mySk1)"#;
#[test]
fn test_parse_member_expression_double_nested_braces() {
let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#);
let tokens = crate::tokeniser::lexer(r#"const prop = yo["one"][two]"#);
let parser = Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_parse_member_expression_binary_expression_period_number_first() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const obj = { a: 1, b: 2 }
const height = 1 - obj.a"#,
);
@ -3010,7 +3002,7 @@ const height = 1 - obj.a"#,
#[test]
fn test_parse_member_expression_binary_expression_brace_number_first() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const obj = { a: 1, b: 2 }
const height = 1 - obj["a"]"#,
);
@ -3020,7 +3012,7 @@ const height = 1 - obj["a"]"#,
#[test]
fn test_parse_member_expression_binary_expression_brace_number_second() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const obj = { a: 1, b: 2 }
const height = obj["a"] - 1"#,
);
@ -3030,7 +3022,7 @@ const height = obj["a"] - 1"#,
#[test]
fn test_parse_member_expression_binary_expression_in_array_number_first() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [1 - obj["a"], 0]"#,
);
@ -3040,7 +3032,7 @@ const height = [1 - obj["a"], 0]"#,
#[test]
fn test_parse_member_expression_binary_expression_in_array_number_second() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] - 1, 0]"#,
);
@ -3050,7 +3042,7 @@ const height = [obj["a"] - 1, 0]"#,
#[test]
fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] -1, 0]"#,
);
@ -3060,7 +3052,7 @@ const height = [obj["a"] -1, 0]"#,
#[test]
fn test_parse_half_pipe() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
"const height = 10
const firstExtrude = startSketchAt([0,0])
@ -3083,17 +3075,15 @@ const secondExtrude = startSketchAt([0,0])
#[test]
fn test_parse_greater_bang() {
let tokens = crate::token::lexer(">!");
let tokens = crate::tokeniser::lexer(">!");
let parser = Parser::new(tokens);
let err = parser.ast().unwrap_err();
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert!(err.to_string().contains("file is empty"));
let result = parser.ast();
assert!(result.is_ok());
}
#[test]
fn test_parse_z_percent_parens() {
let tokens = crate::token::lexer("z%)");
let tokens = crate::tokeniser::lexer("z%)");
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -3102,17 +3092,15 @@ const secondExtrude = startSketchAt([0,0])
#[test]
fn test_parse_parens_unicode() {
let tokens = crate::token::lexer("");
let tokens = crate::tokeniser::lexer("");
let parser = Parser::new(tokens);
let result = parser.ast();
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert!(result.is_err());
assert!(result.is_ok());
}
#[test]
fn test_parse_negative_in_array_binary_expression() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"const leg1 = 5
const thickness = 0.56
@ -3126,7 +3114,7 @@ const bracket = [-leg2 + thickness, 0]
#[test]
fn test_parse_nested_open_brackets() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"
z(-[["#,
);
@ -3141,38 +3129,31 @@ z(-[["#,
#[test]
fn test_parse_weird_new_line_function() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"z
(--#"#,
);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([0, 1])], message: "missing a closing brace for the function call" }"#
);
}
#[test]
fn test_parse_weird_lots_of_fancy_brackets() {
let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
// TODO: Better errors when program cannot tokenize.
// https://github.com/KittyCAD/modeling-app/issues/696
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
);
assert!(result.err().unwrap().to_string().contains("unexpected end"));
}
#[test]
fn test_parse_weird_close_before_open() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"fn)n
e
["#,
@ -3189,7 +3170,7 @@ e
#[test]
fn test_parse_weird_close_before_nada() {
let tokens = crate::token::lexer(r#"fn)n-"#);
let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
let parser = Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
@ -3198,7 +3179,7 @@ e
#[test]
fn test_parse_weird_lots_of_slashes() {
let tokens = crate::token::lexer(
let tokens = crate::tokeniser::lexer(
r#"J///////////o//+///////////P++++*++++++P///////˟
++4"#,
);
@ -3215,7 +3196,7 @@ e
#[test]
fn test_parse_expand_array() {
let code = "const myArray = [0..10]";
let parser = Parser::new(crate::token::lexer(code));
let parser = Parser::new(crate::tokeniser::lexer(code));
let result = parser.ast().unwrap();
let expected_result = Program {
start: 0,
@ -3318,8 +3299,8 @@ e
#[test]
fn test_error_keyword_in_variable() {
let some_program_string = r#"const let = "thing""#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3331,8 +3312,8 @@ e
#[test]
fn test_error_keyword_in_fn_name() {
let some_program_string = r#"fn let = () {}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3344,8 +3325,8 @@ e
#[test]
fn test_error_stdlib_in_fn_name() {
let some_program_string = r#"fn cos = () {}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3359,8 +3340,8 @@ e
let some_program_string = r#"fn thing = (let) => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3374,8 +3355,8 @@ e
let some_program_string = r#"fn thing = (cos) => {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3392,8 +3373,8 @@ e
}
firstPrimeNumber()
"#;
let tokens = crate::token::lexer(program);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(program);
let parser = crate::parser::Parser::new(tokens);
let _ast = parser.ast().unwrap();
}
@ -3405,8 +3386,8 @@ e
thing(false)
"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -3422,8 +3403,8 @@ thing(false)
"#,
name
);
let tokens = crate::token::lexer(&some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(&some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3440,8 +3421,8 @@ thing(false)
#[test]
fn test_error_define_var_as_function() {
let some_program_string = r#"fn thing = "thing""#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
assert!(result.is_err());
assert_eq!(
@ -3469,8 +3450,8 @@ const pt2 = b2[0]
show(b1)
show(b2)"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
@ -3478,36 +3459,18 @@ show(b2)"#;
fn test_math_with_stdlib() {
let some_program_string = r#"const d2r = pi() / 2
let other_thing = 2 * cos(3)"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(some_program_string);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
#[ignore] // ignore until more stack fixes
fn test_parse_pipes_on_pipes() {
let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
let tokens = crate::token::lexer(code);
let parser = Parser::new(tokens);
parser.ast().unwrap();
}
#[test]
fn test_negative_arguments() {
let some_program_string = r#"fn box = (p, h, l, w) => {
const myBox = startSketchAt(p)
|> line([0, l], %)
|> line([w, 0], %)
|> line([0, -l], %)
|> close(%)
|> extrude(h, %)
return myBox
}
let myBox = box([0,0], -3, -16, -10)
show(myBox)"#;
let tokens = crate::token::lexer(some_program_string);
let parser = Parser::new(tokens);
let tokens = crate::tokeniser::lexer(code);
let parser = crate::parser::Parser::new(tokens);
parser.ast().unwrap();
}
}
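Review aside: the predicate exercised above treats a token as code unless it is whitespace or a comment, and the method spelling and the free-function spelling on the two sides of this diff are strict complements. A minimal sketch of that equivalence, assuming the public Token/TokenType definitions shown later in this diff (module path per the token:: side):
use kcl_lib::token::{Token, TokenType};
fn demo() {
    let ws = Token {
        token_type: TokenType::Whitespace,
        start: 0,
        end: 1,
        value: " ".to_string(),
    };
    // Method spelling from one side of the diff:
    assert!(!ws.is_code_token());
    // Free-function spelling from the other side would agree:
    // assert!(is_not_code_token(&ws));
}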

View File

@ -34,7 +34,7 @@ pub struct Backend {
/// The types of tokens the server supports.
pub token_types: Vec<SemanticTokenType>,
/// Token maps.
pub token_map: DashMap<String, Vec<crate::token::Token>>,
pub token_map: DashMap<String, Vec<crate::tokeniser::Token>>,
/// AST maps.
pub ast_map: DashMap<String, crate::ast::types::Program>,
/// Current code.
@ -56,7 +56,7 @@ impl Backend {
// Lets update the tokens.
self.current_code_map
.insert(params.uri.to_string(), params.text.clone());
let tokens = crate::token::lexer(&params.text);
let tokens = crate::tokeniser::lexer(&params.text);
self.token_map.insert(params.uri.to_string(), tokens.clone());
// Update the semantic tokens map.
@ -69,7 +69,9 @@ impl Backend {
continue;
};
if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
if token.token_type == crate::tokeniser::TokenType::Word
&& self.stdlib_completions.contains_key(&token.value)
{
// This is a stdlib function.
token_type = SemanticTokenType::FUNCTION;
}
@ -547,7 +549,7 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let tokens = crate::token::lexer(&current_code);
let tokens = crate::tokeniser::lexer(&current_code);
let parser = crate::parser::Parser::new(tokens);
let Ok(ast) = parser.ast() else {
return Ok(None);
@ -579,7 +581,7 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let tokens = crate::token::lexer(&current_code);
let tokens = crate::tokeniser::lexer(&current_code);
let parser = crate::parser::Parser::new(tokens);
let Ok(mut ast) = parser.ast() else {
return Ok(None);
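Both hunks above repeat the same lex-then-parse round trip before writing anything back out. A minimal sketch of that pipeline, hedged on the module path (token vs tokeniser) that differs across this diff; the helper name is illustrative:
fn parse_current_code(current_code: &str) -> Option<kcl_lib::ast::types::Program> {
    let tokens = kcl_lib::token::lexer(current_code); // kcl_lib::tokeniser::lexer on the other side
    let parser = kcl_lib::parser::Parser::new(tokens);
    parser.ast().ok() // the handlers above bail with Ok(None) when this fails
}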

View File

@ -856,6 +856,26 @@ async fn inner_arc(data: ArcData, sketch_group: Box<SketchGroup>, args: Args) ->
)
.await?;
// TODO: Dont do this (move path pen) - mike
// lets review what the needs are here and see if any existing arc endpoints can accomplish this
// Move the path pen to the end of the arc.
// Since that is where we want to draw the next path.
// TODO: the engine should automatically move the pen to the end of the arc.
// This just seems inefficient.
args.send_modeling_cmd(
id,
ModelingCmd::MovePathPen {
path: sketch_group.id,
to: Point3D {
x: end.x,
y: end.y,
z: 0.0,
},
},
)
.await?;
let current_path = Path::ToPoint {
base: BasePath {
from: from.into(),

View File

@ -1,173 +0,0 @@
use std::str::FromStr;
use anyhow::Result;
use parse_display::{Display, FromStr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;
mod tokeniser;
/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
#[display(style = "camelCase")]
pub enum TokenType {
/// A number.
Number,
/// A word.
Word,
/// An operator.
Operator,
/// A string.
String,
/// A keyword.
Keyword,
/// A brace.
Brace,
/// Whitespace.
Whitespace,
/// A comma.
Comma,
/// A colon.
Colon,
/// A period.
Period,
/// A double period: `..`.
DoublePeriod,
/// A line comment.
LineComment,
/// A block comment.
BlockComment,
/// A function name.
Function,
}
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
impl TryFrom<TokenType> for SemanticTokenType {
type Error = anyhow::Error;
fn try_from(token_type: TokenType) -> Result<Self> {
Ok(match token_type {
TokenType::Number => Self::NUMBER,
TokenType::Word => Self::VARIABLE,
TokenType::Keyword => Self::KEYWORD,
TokenType::Operator => Self::OPERATOR,
TokenType::String => Self::STRING,
TokenType::LineComment => Self::COMMENT,
TokenType::BlockComment => Self::COMMENT,
TokenType::Function => Self::FUNCTION,
TokenType::Whitespace
| TokenType::Brace
| TokenType::Comma
| TokenType::Colon
| TokenType::Period
| TokenType::DoublePeriod => {
anyhow::bail!("unsupported token type: {:?}", token_type)
}
})
}
}
impl TokenType {
// This is for the lsp server.
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
let schema = TokenType::json_schema(&mut generator);
let schemars::schema::Schema::Object(o) = &schema else {
anyhow::bail!("expected object schema: {:#?}", schema);
};
let Some(subschemas) = &o.subschemas else {
anyhow::bail!("expected subschemas: {:#?}", schema);
};
let Some(one_ofs) = &subschemas.one_of else {
anyhow::bail!("expected one_of: {:#?}", schema);
};
let mut semantic_tokens = vec![];
for one_of in one_ofs {
let schemars::schema::Schema::Object(o) = one_of else {
anyhow::bail!("expected object one_of: {:#?}", one_of);
};
let Some(enum_values) = o.enum_values.as_ref() else {
anyhow::bail!("expected enum values: {:#?}", o);
};
if enum_values.len() > 1 {
anyhow::bail!("expected only one enum value: {:#?}", o);
}
if enum_values.is_empty() {
anyhow::bail!("expected at least one enum value: {:#?}", o);
}
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
semantic_tokens.push(semantic_token_type);
}
}
Ok(semantic_tokens)
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
#[ts(export)]
pub struct Token {
#[serde(rename = "type")]
pub token_type: TokenType,
/// Offset in the source code where this token begins.
pub start: usize,
/// Offset in the source code where this token ends.
pub end: usize,
pub value: String,
}
impl Token {
pub fn from_range(range: std::ops::Range<usize>, token_type: TokenType, value: String) -> Self {
Self {
start: range.start,
end: range.end,
value,
token_type,
}
}
pub fn is_code_token(&self) -> bool {
!matches!(
self.token_type,
TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
)
}
}
impl From<Token> for crate::executor::SourceRange {
fn from(token: Token) -> Self {
Self([token.start, token.end])
}
}
impl From<&Token> for crate::executor::SourceRange {
fn from(token: &Token) -> Self {
Self([token.start, token.end])
}
}
pub fn lexer(s: &str) -> Vec<Token> {
tokeniser::lexer(s).unwrap_or_default()
}
#[cfg(test)]
mod tests {
use super::*;
// We have this as a test so we can ensure it never panics with an unwrap in the server.
#[test]
fn test_token_type_to_semantic_token_type() {
let semantic_types = TokenType::all_semantic_token_types().unwrap();
assert!(!semantic_types.is_empty());
}
}
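Worth spelling out for reviewers: the unwrap_or_default() in the lexer wrapper above turns any tokeniser error into an empty Vec<Token>, which is what makes the parser report "file is empty" for untokenizable input in the tests earlier in this diff (e.g. test_parse_greater_bang on this side). A minimal sketch of that behavior, under the same assumption of a fallible inner tokeniser:
// Illustrative only; mirrors the wrapper above.
fn lexer(s: &str) -> Vec<Token> {
    tokeniser::lexer(s).unwrap_or_default() // Err(_) becomes an empty token list
}
// lexer(">!") fails to tokenize on this side, so the parser sees zero tokens
// and parser.ast() errors with "file is empty".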

File diff suppressed because it is too large

View File

@ -0,0 +1,666 @@
use std::str::FromStr;
use anyhow::Result;
use lazy_static::lazy_static;
use parse_display::{Display, FromStr};
use regex::bytes::Regex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;
/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
#[display(style = "camelCase")]
pub enum TokenType {
/// A number.
Number,
/// A word.
Word,
/// An operator.
Operator,
/// A string.
String,
/// A keyword.
Keyword,
/// A brace.
Brace,
/// Whitespace.
Whitespace,
/// A comma.
Comma,
/// A colon.
Colon,
/// A period.
Period,
/// A double period: `..`.
DoublePeriod,
/// A line comment.
LineComment,
/// A block comment.
BlockComment,
/// A function name.
Function,
}
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
impl TryFrom<TokenType> for SemanticTokenType {
type Error = anyhow::Error;
fn try_from(token_type: TokenType) -> Result<Self> {
Ok(match token_type {
TokenType::Number => Self::NUMBER,
TokenType::Word => Self::VARIABLE,
TokenType::Keyword => Self::KEYWORD,
TokenType::Operator => Self::OPERATOR,
TokenType::String => Self::STRING,
TokenType::LineComment => Self::COMMENT,
TokenType::BlockComment => Self::COMMENT,
TokenType::Function => Self::FUNCTION,
TokenType::Whitespace
| TokenType::Brace
| TokenType::Comma
| TokenType::Colon
| TokenType::Period
| TokenType::DoublePeriod => {
anyhow::bail!("unsupported token type: {:?}", token_type)
}
})
}
}
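// Review aside, not part of the file: the punctuation-like variants are
// deliberately unmapped, so SemanticTokenType::try_from(TokenType::Comma)
// returns Err; all_semantic_token_types below depends on that, silently
// filtering those variants out with `if let Ok(...)`.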
impl TokenType {
// This is for the lsp server.
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
let schema = TokenType::json_schema(&mut generator);
let schemars::schema::Schema::Object(o) = &schema else {
anyhow::bail!("expected object schema: {:#?}", schema);
};
let Some(subschemas) = &o.subschemas else {
anyhow::bail!("expected subschemas: {:#?}", schema);
};
let Some(one_ofs) = &subschemas.one_of else {
anyhow::bail!("expected one_of: {:#?}", schema);
};
let mut semantic_tokens = vec![];
for one_of in one_ofs {
let schemars::schema::Schema::Object(o) = one_of else {
anyhow::bail!("expected object one_of: {:#?}", one_of);
};
let Some(enum_values) = o.enum_values.as_ref() else {
anyhow::bail!("expected enum values: {:#?}", o);
};
if enum_values.len() > 1 {
anyhow::bail!("expected only one enum value: {:#?}", o);
}
if enum_values.is_empty() {
anyhow::bail!("expected at least one enum value: {:#?}", o);
}
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
semantic_tokens.push(semantic_token_type);
}
}
Ok(semantic_tokens)
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
#[ts(export)]
pub struct Token {
#[serde(rename = "type")]
pub token_type: TokenType,
/// Offset in the source code where this token begins.
pub start: usize,
/// Offset in the source code where this token ends.
pub end: usize,
pub value: String,
}
impl From<Token> for crate::executor::SourceRange {
fn from(token: Token) -> Self {
Self([token.start, token.end])
}
}
impl From<&Token> for crate::executor::SourceRange {
fn from(token: &Token) -> Self {
Self([token.start, token.end])
}
}
lazy_static! {
static ref NUMBER: Regex = Regex::new(r"^(\d+(\.\d*)?|\.\d+)\b").unwrap();
static ref WHITESPACE: Regex = Regex::new(r"\s+").unwrap();
static ref WORD: Regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*").unwrap();
// TODO: these should be generated using our struct types for these.
static ref KEYWORD: Regex =
Regex::new(r"^(if|else|for|while|return|break|continue|fn|let|mut|loop|true|false|nil|and|or|not|var|const)\b").unwrap();
static ref OPERATOR: Regex = Regex::new(r"^(>=|<=|==|=>|!= |\|>|\*|\+|-|/|%|=|<|>|\||\^)").unwrap();
static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
static ref BLOCK_START: Regex = Regex::new(r"^\{").unwrap();
static ref BLOCK_END: Regex = Regex::new(r"^\}").unwrap();
static ref PARAN_START: Regex = Regex::new(r"^\(").unwrap();
static ref PARAN_END: Regex = Regex::new(r"^\)").unwrap();
static ref ARRAY_START: Regex = Regex::new(r"^\[").unwrap();
static ref ARRAY_END: Regex = Regex::new(r"^\]").unwrap();
static ref COMMA: Regex = Regex::new(r"^,").unwrap();
static ref COLON: Regex = Regex::new(r"^:").unwrap();
static ref PERIOD: Regex = Regex::new(r"^\.").unwrap();
static ref DOUBLE_PERIOD: Regex = Regex::new(r"^\.\.").unwrap();
static ref LINECOMMENT: Regex = Regex::new(r"^//.*").unwrap();
static ref BLOCKCOMMENT: Regex = Regex::new(r"^/\*[\s\S]*?\*/").unwrap();
}
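// Review aside, not part of the file: unlike every other pattern above,
// WHITESPACE is not anchored with `^`, and match_first below uses find(),
// which matches anywhere in the remaining input. If no other pattern matches
// the current byte, a later whitespace run could be tokenized at the wrong
// offset; worth checking whether this should be r"^\s+".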
fn is_string(character: &[u8]) -> bool {
match STRING.find(character) {
Some(m) => m.start() == 0,
None => false,
}
}
fn match_first(s: &[u8], regex: &Regex) -> Option<String> {
regex
.find(s)
.map(|the_match| String::from_utf8_lossy(the_match.as_bytes()).into())
}
fn make_token(token_type: TokenType, value: String, start: usize) -> Token {
Token {
token_type,
end: start + value.len(),
value,
start,
}
}
fn return_token_at_index(str_from_index: &[u8], start_index: usize) -> Option<Token> {
if is_string(str_from_index) {
return Some(make_token(
TokenType::String,
match_first(str_from_index, &STRING)?,
start_index,
));
}
if let Some(val) = match_first(str_from_index, &LINECOMMENT) {
return Some(make_token(TokenType::LineComment, val, start_index));
}
if let Some(val) = match_first(str_from_index, &BLOCKCOMMENT) {
return Some(make_token(TokenType::BlockComment, val, start_index));
}
if let Some(val) = match_first(str_from_index, &PARAN_END) {
return Some(make_token(TokenType::Brace, val, start_index));
}
if let Some(val) = match_first(str_from_index, &PARAN_START) {
return Some(make_token(TokenType::Brace, val, start_index));
}
if let Some(val) = match_first(str_from_index, &BLOCK_START) {
return Some(make_token(TokenType::Brace, val, start_index));
}
if let Some(val) = match_first(str_from_index, &BLOCK_END) {
return Some(make_token(TokenType::Brace, val, start_index));
}
if let Some(val) = match_first(str_from_index, &ARRAY_START) {
return Some(make_token(TokenType::Brace, val, start_index));
}
if let Some(val) = match_first(str_from_index, &ARRAY_END) {
return Some(make_token(TokenType::Brace, val, start_index));
}
if let Some(val) = match_first(str_from_index, &COMMA) {
return Some(make_token(TokenType::Comma, val, start_index));
}
if let Some(val) = match_first(str_from_index, &OPERATOR) {
return Some(make_token(TokenType::Operator, val, start_index));
}
if let Some(val) = match_first(str_from_index, &NUMBER) {
return Some(make_token(TokenType::Number, val, start_index));
}
if let Some(val) = match_first(str_from_index, &KEYWORD) {
return Some(make_token(TokenType::Keyword, val, start_index));
}
if let Some(val) = match_first(str_from_index, &WORD) {
return Some(make_token(TokenType::Word, val, start_index));
}
if let Some(val) = match_first(str_from_index, &COLON) {
return Some(make_token(TokenType::Colon, val, start_index));
}
if let Some(val) = match_first(str_from_index, &DOUBLE_PERIOD) {
return Some(make_token(TokenType::DoublePeriod, val, start_index));
}
if let Some(val) = match_first(str_from_index, &PERIOD) {
return Some(make_token(TokenType::Period, val, start_index));
}
if let Some(val) = match_first(str_from_index, &WHITESPACE) {
return Some(make_token(TokenType::Whitespace, val, start_index));
}
None
}
fn recursively_tokenise(s: &[u8], current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
if current_index >= s.len() {
return previous_tokens;
}
let token = return_token_at_index(&s[current_index..], current_index);
let Some(token) = token else {
return recursively_tokenise(s, current_index + 1, previous_tokens);
};
let mut new_tokens = previous_tokens;
let token_length = token.value.len();
new_tokens.push(token);
recursively_tokenise(s, current_index + token_length, new_tokens)
}
pub fn lexer(s: &str) -> Vec<Token> {
recursively_tokenise(s.as_bytes(), 0, Vec::new())
}
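// Review aside, not part of the file: when return_token_at_index yields None,
// recursively_tokenise advances a single byte, so this lexer never fails and
// unrecognized characters are silently dropped. That matches the parser tests
// on this side of the diff (e.g. test_parse_parens_unicode asserting is_ok)
// and replaces the fallible lexer wrapped with unwrap_or_default() elsewhere
// in this diff.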
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use super::*;
fn is_paren_end(character: &[u8]) -> bool {
PARAN_END.is_match(character)
}
fn is_number(character: &[u8]) -> bool {
NUMBER.is_match(character)
}
fn is_whitespace(character: &[u8]) -> bool {
WHITESPACE.is_match(character)
}
fn is_word(character: &[u8]) -> bool {
WORD.is_match(character)
}
fn is_string(character: &[u8]) -> bool {
match STRING.find(character) {
Some(m) => m.start() == 0,
None => false,
}
}
fn is_operator(character: &[u8]) -> bool {
OPERATOR.is_match(character)
}
fn is_block_start(character: &[u8]) -> bool {
BLOCK_START.is_match(character)
}
fn is_block_end(character: &[u8]) -> bool {
BLOCK_END.is_match(character)
}
fn is_paren_start(character: &[u8]) -> bool {
PARAN_START.is_match(character)
}
fn is_comma(character: &[u8]) -> bool {
COMMA.is_match(character)
}
fn is_line_comment(character: &[u8]) -> bool {
LINECOMMENT.is_match(character)
}
fn is_block_comment(character: &[u8]) -> bool {
BLOCKCOMMENT.is_match(character)
}
#[test]
fn is_number_test() {
assert!(is_number("1".as_bytes()));
assert!(is_number("1 abc".as_bytes()));
assert!(is_number("1.1".as_bytes()));
assert!(is_number("1.1 abc".as_bytes()));
assert!(!is_number("a".as_bytes()));
assert!(is_number("1".as_bytes()));
assert!(is_number(".1".as_bytes()));
assert!(is_number("5?".as_bytes()));
assert!(is_number("5 + 6".as_bytes()));
assert!(is_number("5 + a".as_bytes()));
assert!(is_number("5.5".as_bytes()));
assert!(!is_number("1abc".as_bytes()));
assert!(!is_number("a".as_bytes()));
assert!(!is_number("?".as_bytes()));
assert!(!is_number("?5".as_bytes()));
}
#[test]
fn is_whitespace_test() {
assert!(is_whitespace(" ".as_bytes()));
assert!(is_whitespace(" ".as_bytes()));
assert!(is_whitespace(" a".as_bytes()));
assert!(is_whitespace("a ".as_bytes()));
assert!(!is_whitespace("a".as_bytes()));
assert!(!is_whitespace("?".as_bytes()));
}
#[test]
fn is_word_test() {
assert!(is_word("a".as_bytes()));
assert!(is_word("a ".as_bytes()));
assert!(is_word("a5".as_bytes()));
assert!(is_word("a5a".as_bytes()));
assert!(!is_word("5".as_bytes()));
assert!(!is_word("5a".as_bytes()));
assert!(!is_word("5a5".as_bytes()));
}
#[test]
fn is_string_test() {
assert!(is_string("\"\"".as_bytes()));
assert!(is_string("\"a\"".as_bytes()));
assert!(is_string("\"a\" ".as_bytes()));
assert!(is_string("\"a\"5".as_bytes()));
assert!(is_string("'a'5".as_bytes()));
assert!(is_string("\"with escaped \\\" backslash\"".as_bytes()));
assert!(!is_string("\"".as_bytes()));
assert!(!is_string("\"a".as_bytes()));
assert!(!is_string("a\"".as_bytes()));
assert!(!is_string(" \"a\"".as_bytes()));
assert!(!is_string("5\"a\"".as_bytes()));
assert!(!is_string("a + 'str'".as_bytes()));
assert!(is_string("'c'".as_bytes()));
}
#[test]
fn is_operator_test() {
assert!(is_operator("+".as_bytes()));
assert!(is_operator("+ ".as_bytes()));
assert!(is_operator("-".as_bytes()));
assert!(is_operator("<=".as_bytes()));
assert!(is_operator("<= ".as_bytes()));
assert!(is_operator(">=".as_bytes()));
assert!(is_operator(">= ".as_bytes()));
assert!(is_operator("> ".as_bytes()));
assert!(is_operator("< ".as_bytes()));
assert!(is_operator("| ".as_bytes()));
assert!(is_operator("|> ".as_bytes()));
assert!(is_operator("^ ".as_bytes()));
assert!(is_operator("% ".as_bytes()));
assert!(is_operator("+* ".as_bytes()));
assert!(!is_operator("5 + 5".as_bytes()));
assert!(!is_operator("a".as_bytes()));
assert!(!is_operator("a+".as_bytes()));
assert!(!is_operator("a+5".as_bytes()));
assert!(!is_operator("5a+5".as_bytes()));
assert!(!is_operator(", newVar".as_bytes()));
assert!(!is_operator(",".as_bytes()));
}
#[test]
fn is_block_start_test() {
assert!(is_block_start("{".as_bytes()));
assert!(is_block_start("{ ".as_bytes()));
assert!(is_block_start("{5".as_bytes()));
assert!(is_block_start("{a".as_bytes()));
assert!(is_block_start("{5 ".as_bytes()));
assert!(!is_block_start("5".as_bytes()));
assert!(!is_block_start("5 + 5".as_bytes()));
assert!(!is_block_start("5{ + 5".as_bytes()));
assert!(!is_block_start("a{ + 5".as_bytes()));
assert!(!is_block_start(" { + 5".as_bytes()));
}
#[test]
fn is_block_end_test() {
assert!(is_block_end("}".as_bytes()));
assert!(is_block_end("} ".as_bytes()));
assert!(is_block_end("}5".as_bytes()));
assert!(is_block_end("}5 ".as_bytes()));
assert!(!is_block_end("5".as_bytes()));
assert!(!is_block_end("5 + 5".as_bytes()));
assert!(!is_block_end("5} + 5".as_bytes()));
assert!(!is_block_end(" } + 5".as_bytes()));
}
#[test]
fn is_paren_start_test() {
assert!(is_paren_start("(".as_bytes()));
assert!(is_paren_start("( ".as_bytes()));
assert!(is_paren_start("(5".as_bytes()));
assert!(is_paren_start("(5 ".as_bytes()));
assert!(is_paren_start("(5 + 5".as_bytes()));
assert!(is_paren_start("(5 + 5)".as_bytes()));
assert!(is_paren_start("(5 + 5) ".as_bytes()));
assert!(!is_paren_start("5".as_bytes()));
assert!(!is_paren_start("5 + 5".as_bytes()));
assert!(!is_paren_start("5( + 5)".as_bytes()));
assert!(!is_paren_start(" ( + 5)".as_bytes()));
}
#[test]
fn is_paren_end_test() {
assert!(is_paren_end(")".as_bytes()));
assert!(is_paren_end(") ".as_bytes()));
assert!(is_paren_end(")5".as_bytes()));
assert!(is_paren_end(")5 ".as_bytes()));
assert!(!is_paren_end("5".as_bytes()));
assert!(!is_paren_end("5 + 5".as_bytes()));
assert!(!is_paren_end("5) + 5".as_bytes()));
assert!(!is_paren_end(" ) + 5".as_bytes()));
}
#[test]
fn is_comma_test() {
assert!(is_comma(",".as_bytes()));
assert!(is_comma(", ".as_bytes()));
assert!(is_comma(",5".as_bytes()));
assert!(is_comma(",5 ".as_bytes()));
assert!(!is_comma("5".as_bytes()));
assert!(!is_comma("5 + 5".as_bytes()));
assert!(!is_comma("5, + 5".as_bytes()));
assert!(!is_comma(" , + 5".as_bytes()));
}
#[test]
fn is_line_comment_test() {
assert!(is_line_comment("//".as_bytes()));
assert!(is_line_comment("// ".as_bytes()));
assert!(is_line_comment("//5".as_bytes()));
assert!(is_line_comment("//5 ".as_bytes()));
assert!(!is_line_comment("5".as_bytes()));
assert!(!is_line_comment("5 + 5".as_bytes()));
assert!(!is_line_comment("5// + 5".as_bytes()));
assert!(!is_line_comment(" // + 5".as_bytes()));
}
#[test]
fn is_block_comment_test() {
assert!(is_block_comment("/* */".as_bytes()));
assert!(is_block_comment("/***/".as_bytes()));
assert!(is_block_comment("/*5*/".as_bytes()));
assert!(is_block_comment("/*5 */".as_bytes()));
assert!(!is_block_comment("/*".as_bytes()));
assert!(!is_block_comment("5".as_bytes()));
assert!(!is_block_comment("5 + 5".as_bytes()));
assert!(!is_block_comment("5/* + 5".as_bytes()));
assert!(!is_block_comment(" /* + 5".as_bytes()));
assert!(!is_block_comment(
r#" /* and
here
*/
"#
.as_bytes()
));
}
#[test]
fn make_token_test() {
assert_eq!(
make_token(TokenType::Keyword, "const".to_owned(), 56),
Token {
token_type: TokenType::Keyword,
value: "const".to_string(),
start: 56,
end: 61,
}
);
}
#[test]
fn return_token_at_index_test() {
assert_eq!(
return_token_at_index("const".as_bytes(), 0),
Some(Token {
token_type: TokenType::Keyword,
value: "const".to_string(),
start: 0,
end: 5,
})
);
assert_eq!(
return_token_at_index("4554".as_bytes(), 2),
Some(Token {
token_type: TokenType::Number,
value: "4554".to_string(),
start: 2,
end: 6,
})
);
}
#[test]
fn lexer_test() {
assert_eq!(
lexer("const a=5"),
vec![
Token {
token_type: TokenType::Keyword,
value: "const".to_string(),
start: 0,
end: 5,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 5,
end: 6,
},
Token {
token_type: TokenType::Word,
value: "a".to_string(),
start: 6,
end: 7,
},
Token {
token_type: TokenType::Operator,
value: "=".to_string(),
start: 7,
end: 8,
},
Token {
token_type: TokenType::Number,
value: "5".to_string(),
start: 8,
end: 9,
},
]
);
assert_eq!(
lexer("54 + 22500 + 6"),
vec![
Token {
token_type: TokenType::Number,
value: "54".to_string(),
start: 0,
end: 2,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 2,
end: 3,
},
Token {
token_type: TokenType::Operator,
value: "+".to_string(),
start: 3,
end: 4,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 4,
end: 5,
},
Token {
token_type: TokenType::Number,
value: "22500".to_string(),
start: 5,
end: 10,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 10,
end: 11,
},
Token {
token_type: TokenType::Operator,
value: "+".to_string(),
start: 11,
end: 12,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 12,
end: 13,
},
Token {
token_type: TokenType::Number,
value: "6".to_string(),
start: 13,
end: 14,
},
]
);
}
// We have this as a test so we can ensure it never panics with an unwrap in the server.
#[test]
fn test_token_type_to_semantic_token_type() {
let semantic_types = TokenType::all_semantic_token_types().unwrap();
assert!(!semantic_types.is_empty());
}
#[test]
fn test_lexer_negative_word() {
assert_eq!(
lexer("-legX"),
vec![
Token {
token_type: TokenType::Operator,
value: "-".to_string(),
start: 0,
end: 1,
},
Token {
token_type: TokenType::Word,
value: "legX".to_string(),
start: 1,
end: 5,
},
]
);
}
}
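This file is where the commit message ("Only run regex once per token") cashes out: the lazy_static! block compiles every pattern a single time per process, and return_token_at_index calls match_first directly rather than testing with an is_*() predicate and then matching a second time (the is_*() helpers now survive only inside the tests above). A minimal sketch of the call pattern, with the two-pass "before" shape inferred from the commit message rather than shown in this diff:
use lazy_static::lazy_static;
use regex::bytes::Regex;
lazy_static! {
    // Compiled on first use, then reused for every subsequent token.
    static ref COMMA: Regex = Regex::new(r"^,").unwrap();
}
fn match_first(s: &[u8], regex: &Regex) -> Option<String> {
    regex
        .find(s)
        .map(|m| String::from_utf8_lossy(m.as_bytes()).into())
}
fn demo(s: &[u8]) {
    // One regex execution per candidate token:
    if let Some(val) = match_first(s, &COMMA) {
        println!("comma token: {val}");
    }
    // Inferred earlier shape -- two executions, a boolean test then a match:
    // if COMMA.is_match(s) {
    //     let val = match_first(s, &COMMA).unwrap();
    // }
}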

View File

@ -84,13 +84,13 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
// test for this function and by extension lexer are done in javascript land src/lang/tokeniser.test.ts
#[wasm_bindgen]
pub fn lexer_js(js: &str) -> Result<JsValue, JsError> {
let tokens = kcl_lib::token::lexer(js);
let tokens = kcl_lib::tokeniser::lexer(js);
Ok(JsValue::from_serde(&tokens)?)
}
#[wasm_bindgen]
pub fn parse_js(js: &str) -> Result<JsValue, String> {
let tokens = kcl_lib::token::lexer(js);
let tokens = kcl_lib::tokeniser::lexer(js);
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast().map_err(String::from)?;
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
@ -149,7 +149,7 @@ pub async fn lsp_run(config: ServerConfig) -> Result<(), JsValue> {
let stdlib_signatures = get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
// We can unwrap here because we know the tokeniser is valid, since
// we have a test for it.
let token_types = kcl_lib::token::TokenType::all_semantic_token_types().unwrap();
let token_types = kcl_lib::tokeniser::TokenType::all_semantic_token_types().unwrap();
let (service, socket) = LspService::new(|client| Backend {
client,

View File

@ -306,5 +306,5 @@ const svg = startSketchAt([0, 0])
|> lineTo([13.44, -10.92], %) // HorizontalLineRelative
|> lineTo([13.44, -13.44], %) // VerticalLineHorizonal
|> lineTo([14.28, -13.44], %) // HorizontalLineRelative
|> close(%)
show(svg)
|> close(%);
show(svg);

View File

@ -466,5 +466,5 @@ const svg = startSketchAt([0, 0])
|> bezierCurve({ control1: [-4, -3], control2: [-2.66, -3.67], to: [-3.32, -3.34] }, %) // CubicBezierAbsolute
|> bezierCurve({ control1: [0, -2], control2: [-2.68, -2.67], to: [-1.36, -2.34] }, %) // CubicBezierAbsolute
|> bezierCurve({ control1: [0, -0], control2: [0, -1.34], to: [0, -0.68] }, %) // CubicBezierAbsolute
|> close(%)
show(svg)
|> close(%);
show(svg);

View File

@ -32,7 +32,7 @@ async fn execute_and_snapshot(code: &str) -> Result<image::DynamicImage> {
// Create a temporary file to write the output to.
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
let tokens = kcl_lib::token::lexer(code);
let tokens = kcl_lib::tokeniser::lexer(code);
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let mut mem: kcl_lib::executor::ProgramMemory = Default::default();
@ -210,45 +210,3 @@ show(b2)"#;
1.0,
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_close_arc() {
let code = r#"const center = [0,0]
const radius = 40
const height = 3
const body = startSketchAt([center[0]+radius, center[1]])
|> arc({angle_end: 360, angle_start: 0, radius: radius}, %)
|> close(%)
|> extrude(height, %)
show(body)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 1.0);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_negative_args() {
let code = r#"const width = 5
const height = 10
const length = 12
fn box = (sk1, sk2, scale) => {
const boxSketch = startSketchAt([sk1, sk2])
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
|> close(%)
|> extrude(scale, %)
return boxSketch
}
box(0, 0, 5)
box(10, 23, 8)
let thing = box(-12, -15, 10)
box(-20, -5, 10)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 1.0);
}

Binary file not shown. (Before: 95 KiB)

Binary file not shown. (Before: 78 KiB)

View File

@ -33,7 +33,7 @@ async fn setup(code: &str, name: &str) -> Result<(EngineConnection, Program, uui
.commands_ws(None, None, None, None, Some(false))
.await?;
let tokens = kcl_lib::token::lexer(code);
let tokens = kcl_lib::tokeniser::lexer(code);
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let mut mem: kcl_lib::executor::ProgramMemory = Default::default();

View File

@ -1530,10 +1530,10 @@
resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==
"@kittycad/lib@^0.0.39":
version "0.0.39"
resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.39.tgz#e548acf5ff7d45a1f1ec9ad2c61ddcfc30d159b7"
integrity sha512-cB4wNjsKTMpJUn/kMK3qtkVAqB1csSglqThe+bj02nC1kWTB1XgYxksooc/Gzl1MoK1/n0OPQcbOb7Tojb836A==
"@kittycad/lib@^0.0.38":
version "0.0.38"
resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.38.tgz#50474266f679990bd414c30f884f2d42a0d5dba9"
integrity sha512-Lv9P7jqVRoGgOnCsRCsG8OwZH5n3scxXYrElR+5/Rsd6/KIarLB4bSBngJrXebOnmTw5md0OPeY+b3ZDbZFDeg==
dependencies:
node-fetch "3.3.2"
openapi-types "^12.0.0"
@ -1883,10 +1883,10 @@
resolved "https://registry.yarnpkg.com/@types/crypto-js/-/crypto-js-4.1.1.tgz#602859584cecc91894eb23a4892f38cfa927890d"
integrity sha512-BG7fQKZ689HIoc5h+6D2Dgq1fABRa0RbBWKBd9SP/MVRVXROflpm5fhwyATX5duFmbStzyzyycPB8qUYKDH3NA==
"@types/debounce-promise@^3.1.6":
version "3.1.6"
resolved "https://registry.yarnpkg.com/@types/debounce-promise/-/debounce-promise-3.1.6.tgz#873e838574011095ed0debf73eed3538e1261d75"
integrity sha512-DowqK95aku+OxMCeG2EQSeXeGeE8OCwLpMsUfIbP7hMF8Otj8eQXnzpwdtIKV+UqQBtkMcF6vbi4Otbh8P/wmg==
"@types/debounce@^1.2.1":
version "1.2.1"
resolved "https://registry.yarnpkg.com/@types/debounce/-/debounce-1.2.1.tgz#79b65710bc8b6d44094d286aecf38e44f9627852"
integrity sha512-epMsEE85fi4lfmJUH/89/iV/LI+F5CvNIvmgs5g5jYFPfhO2S/ae8WSsLOKWdwtoaZw9Q2IhJ4tQ5tFCcS/4HA==
"@types/eslint@^8.4.5":
version "8.44.1"
@ -2806,11 +2806,6 @@ data-uri-to-buffer@^4.0.0:
resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz#d8feb2b2881e6a4f58c2e08acfd0e2834e26222e"
integrity sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==
debounce-promise@^3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/debounce-promise/-/debounce-promise-3.1.2.tgz#320fb8c7d15a344455cd33cee5ab63530b6dc7c5"
integrity sha512-rZHcgBkbYavBeD9ej6sP56XfG53d51CD4dnaw989YX/nZ/ZJfgRx/9ePKmTNiUiyQvh4mtrMoS3OAWW+yoYtpg==
debug@^3.2.7:
version "3.2.7"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a"