Compare commits
13 Commits
- 12b3717eb5
- 0bc685b0c4
- 9ee032771a
- c307ddd1b1
- a30818ff2b
- 53e763d938
- 8f74cd1d0c
- c271942897
- a03d09b41d
- 2971b7752b
- 70e99eb00b
- 5c66af59d2
- 6dda6daeef
@@ -1,6 +1,6 @@
 {
   "name": "untitled-app",
-  "version": "0.9.1",
+  "version": "0.9.2",
   "private": true,
   "dependencies": {
     "@codemirror/autocomplete": "^6.9.0",
@@ -10,7 +10,7 @@
     "@fortawesome/react-fontawesome": "^0.2.0",
     "@headlessui/react": "^1.7.13",
     "@headlessui/tailwindcss": "^0.2.0",
-    "@kittycad/lib": "^0.0.38",
+    "@kittycad/lib": "^0.0.39",
     "@lezer/javascript": "^1.4.7",
     "@open-rpc/client-js": "^1.8.1",
     "@react-hook/resize-observer": "^1.2.6",
@@ -102,7 +102,6 @@
     "@babel/preset-env": "^7.22.9",
     "@tauri-apps/cli": "^1.3.1",
     "@types/crypto-js": "^4.1.1",
-    "@types/debounce": "^1.2.1",
     "@types/debounce-promise": "^3.1.6",
     "@types/isomorphic-fetch": "^0.0.36",
     "@types/react-modal": "^3.16.0",

src-tauri/Cargo.lock (2 changes, generated)

@@ -3775,7 +3775,7 @@ dependencies = [
 [[package]]
 name = "tauri-plugin-fs-extra"
 version = "0.0.0"
-source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#0190f68f1dff80576595a1b79e31338a3e9ebba1"
+source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#b04bde3461066c709d6801cf9ca305cf889a8394"
 dependencies = [
  "log",
  "serde",

@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "kittycad-modeling",
-    "version": "0.9.1"
+    "version": "0.9.2"
   },
   "tauri": {
     "allowlist": {

src/App.tsx (12 changes)

@@ -31,6 +31,7 @@ import { TextEditor } from 'components/TextEditor'
 import { Themes, getSystemTheme } from 'lib/theme'
 import { useSetupEngineManager } from 'hooks/useSetupEngineManager'
 import { useEngineConnectionSubscriptions } from 'hooks/useEngineConnectionSubscriptions'
+import { engineCommandManager } from './lang/std/engineConnection'
 
 export function App() {
   const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
@@ -39,7 +40,6 @@ export function App() {
   useHotKeyListener()
   const {
     setCode,
-    engineCommandManager,
     buttonDownInStream,
     openPanes,
     setOpenPanes,
@@ -52,7 +52,6 @@ export function App() {
     guiMode: s.guiMode,
     setGuiMode: s.setGuiMode,
     setCode: s.setCode,
-    engineCommandManager: s.engineCommandManager,
     buttonDownInStream: s.buttonDownInStream,
     openPanes: s.openPanes,
     setOpenPanes: s.setOpenPanes,
@@ -91,12 +90,12 @@ export function App() {
     if (guiMode.sketchMode === 'sketchEdit') {
       // TODO: share this with Toolbar's "Exit sketch" button
       // exiting sketch should be done consistently across all exits
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: { type: 'edit_mode_exit' },
       })
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: { type: 'default_camera_disable_sketch_mode' },
@@ -107,7 +106,7 @@ export function App() {
       // when exiting sketch mode in the future
       executeAst()
     } else {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: {
@@ -156,7 +155,7 @@ export function App() {
   useEngineConnectionSubscriptions()
 
   const debounceSocketSend = throttle<EngineCommand>((message) => {
-    engineCommandManager?.sendSceneCommand(message)
+    engineCommandManager.sendSceneCommand(message)
   }, 16)
   const handleMouseMove: MouseEventHandler<HTMLDivElement> = (e) => {
     e.nativeEvent.preventDefault()
@@ -216,7 +215,6 @@ export function App() {
     } else if (interactionGuards.zoom.dragCallback(eWithButton)) {
       interaction = 'zoom'
     } else {
-      console.log('none')
       return
     }
 
@@ -18,6 +18,7 @@ import styles from './Toolbar.module.css'
 import { v4 as uuidv4 } from 'uuid'
 import { useAppMode } from 'hooks/useAppMode'
 import { ActionIcon } from 'components/ActionIcon'
+import { engineCommandManager } from './lang/std/engineConnection'
 
 export const sketchButtonClassnames = {
   background:
@@ -50,7 +51,6 @@ export const Toolbar = () => {
     ast,
     updateAst,
     programMemory,
-    engineCommandManager,
     executeAst,
   } = useStore((s) => ({
     guiMode: s.guiMode,
@@ -59,15 +59,10 @@ export const Toolbar = () => {
     ast: s.ast,
     updateAst: s.updateAst,
     programMemory: s.programMemory,
-    engineCommandManager: s.engineCommandManager,
     executeAst: s.executeAst,
   }))
   useAppMode()
 
-  useEffect(() => {
-    console.log('guiMode', guiMode)
-  }, [guiMode])
-
   function ToolbarButtons({ className }: React.HTMLAttributes<HTMLElement>) {
     return (
       <span className={styles.toolbarButtons + ' ' + className}>
@@ -173,12 +168,12 @@ export const Toolbar = () => {
       {guiMode.mode === 'sketch' && (
         <button
           onClick={() => {
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd_id: uuidv4(),
               cmd: { type: 'edit_mode_exit' },
             })
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd_id: uuidv4(),
               cmd: { type: 'default_camera_disable_sketch_mode' },
@@ -214,7 +209,7 @@ export const Toolbar = () => {
         <button
           key={sketchFnName}
           onClick={() => {
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd_id: uuidv4(),
               cmd: {

@@ -10,6 +10,7 @@ import {
 } from '../lang/modifyAst'
 import { findAllPreviousVariables, PrevVariable } from '../lang/queryAst'
 import { useStore } from '../useStore'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 export const AvailableVars = ({
   onVarClick,
@@ -92,14 +93,11 @@ export function useCalc({
   newVariableInsertIndex: number
   setNewVariableName: (a: string) => void
 } {
-  const { ast, programMemory, selectionRange, engineCommandManager } = useStore(
-    (s) => ({
-      ast: s.ast,
-      programMemory: s.programMemory,
-      selectionRange: s.selectionRanges.codeBasedSelections[0].range,
-      engineCommandManager: s.engineCommandManager,
-    })
-  )
+  const { ast, programMemory, selectionRange } = useStore((s) => ({
+    ast: s.ast,
+    programMemory: s.programMemory,
+    selectionRange: s.selectionRanges.codeBasedSelections[0].range,
+  }))
   const inputRef = useRef<HTMLInputElement>(null)
   const [availableVarInfo, setAvailableVarInfo] = useState<
     ReturnType<typeof findAllPreviousVariables>
@@ -140,7 +138,6 @@ export function useCalc({
   }, [ast, programMemory, selectionRange])
 
   useEffect(() => {
-    if (!engineCommandManager) return
     try {
       const code = `const __result__ = ${value}\nshow(__result__)`
       const ast = parser_wasm(code)

@@ -1,5 +1,4 @@
 import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
-import { useStore } from '../useStore'
 import { v4 as uuidv4 } from 'uuid'
 import { EngineCommand } from '../lang/std/engineConnection'
 import { useState } from 'react'
@@ -7,6 +6,7 @@ import { ActionButton } from '../components/ActionButton'
 import { faCheck } from '@fortawesome/free-solid-svg-icons'
 import { isReducedMotion } from 'lang/util'
 import { AstExplorer } from './AstExplorer'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 type SketchModeCmd = Extract<
   Extract<EngineCommand, { type: 'modeling_cmd_req' }>['cmd'],
@@ -14,9 +14,6 @@ type SketchModeCmd = Extract<
 >
 
 export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
-  const { engineCommandManager } = useStore((s) => ({
-    engineCommandManager: s.engineCommandManager,
-  }))
   const [sketchModeCmd, setSketchModeCmd] = useState<SketchModeCmd>({
     type: 'default_camera_enable_sketch_mode',
     origin: { x: 0, y: 0, z: 0 },
@@ -70,19 +67,18 @@ export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
             className="w-16"
             type="checkbox"
             checked={sketchModeCmd.ortho}
-            onChange={(a) => {
-              console.log(a, (a as any).checked)
+            onChange={(a) =>
               setSketchModeCmd({
                 ...sketchModeCmd,
                 ortho: a.target.checked,
               })
-            }}
+            }
           />
         </div>
         <ActionButton
           Element="button"
           onClick={() => {
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd: sketchModeCmd,
               cmd_id: uuidv4(),

@@ -1,11 +1,11 @@
 import { v4 as uuidv4 } from 'uuid'
-import { useStore } from '../useStore'
 import { faFileExport, faXmark } from '@fortawesome/free-solid-svg-icons'
 import { ActionButton } from './ActionButton'
 import Modal from 'react-modal'
 import React from 'react'
 import { useFormik } from 'formik'
 import { Models } from '@kittycad/lib'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 type OutputFormat = Models['OutputFormat_type']
 
@@ -18,10 +18,6 @@ interface ExportButtonProps extends React.PropsWithChildren {
 }
 
 export const ExportButton = ({ children, className }: ExportButtonProps) => {
-  const { engineCommandManager } = useStore((s) => ({
-    engineCommandManager: s.engineCommandManager,
-  }))
-
   const [modalIsOpen, setIsOpen] = React.useState(false)
 
   const defaultType = 'gltf'
@@ -66,7 +62,7 @@ export const ExportButton = ({ children, className }: ExportButtonProps) => {
       },
     }
   }
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd: {
                 type: 'export',

@@ -25,6 +25,7 @@ import { modify_ast_for_sketch } from '../wasm-lib/pkg/wasm_lib'
 import { KCLError } from 'lang/errors'
 import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
 import { rangeTypeFix } from 'lang/abstractSyntaxTree'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 export const Stream = ({ className = '' }) => {
   const [isLoading, setIsLoading] = useState(true)
@@ -32,7 +33,6 @@ export const Stream = ({ className = '' }) => {
   const videoRef = useRef<HTMLVideoElement>(null)
   const {
     mediaStream,
-    engineCommandManager,
     setButtonDownInStream,
     didDragInStream,
     setDidDragInStream,
@@ -45,7 +45,6 @@ export const Stream = ({ className = '' }) => {
     programMemory,
   } = useStore((s) => ({
     mediaStream: s.mediaStream,
-    engineCommandManager: s.engineCommandManager,
     setButtonDownInStream: s.setButtonDownInStream,
     fileId: s.fileId,
     didDragInStream: s.didDragInStream,
@@ -73,7 +72,7 @@ export const Stream = ({ className = '' }) => {
     if (!videoRef.current) return
     if (!mediaStream) return
     videoRef.current.srcObject = mediaStream
-  }, [mediaStream, engineCommandManager])
+  }, [mediaStream])
 
   const handleMouseDown: MouseEventHandler<HTMLVideoElement> = (e) => {
     if (!videoRef.current) return
@@ -107,7 +106,7 @@ export const Stream = ({ className = '' }) => {
     }
 
     if (guiMode.mode === 'sketch' && guiMode.sketchMode === ('move' as any)) {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd: {
           type: 'handle_mouse_drag_start',
@@ -121,7 +120,7 @@ export const Stream = ({ className = '' }) => {
         guiMode.sketchMode === ('sketch_line' as any)
       )
     ) {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd: {
           type: 'camera_drag_start',
@@ -139,7 +138,7 @@ export const Stream = ({ className = '' }) => {
   const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
     if (!cameraMouseDragGuards[cameraControls].zoom.scrollCallback(e)) return
 
-    engineCommandManager?.sendSceneCommand({
+    engineCommandManager.sendSceneCommand({
       type: 'modeling_cmd_req',
       cmd: {
         type: 'default_camera_zoom',
@@ -177,7 +176,7 @@ export const Stream = ({ className = '' }) => {
     }
 
     if (!didDragInStream) {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd: {
           type: 'select_with_point',
@@ -214,7 +213,7 @@ export const Stream = ({ className = '' }) => {
         window: { x, y },
       }
     }
-    engineCommandManager?.sendSceneCommand(command).then(async (resp) => {
+    engineCommandManager.sendSceneCommand(command).then(async (resp) => {
       if (!(guiMode.mode === 'sketch')) return
 
       if (guiMode.sketchMode === 'selectFace') return
@@ -240,9 +239,6 @@ export const Stream = ({ className = '' }) => {
       ) {
         // Let's get the updated ast.
         if (sketchGroupId === '') return
-
-        console.log('guiMode.pathId', guiMode.pathId)
-
         // We have a problem if we do not have an id for the sketch group.
         if (
           guiMode.pathId === undefined ||
@@ -285,7 +281,7 @@ export const Stream = ({ className = '' }) => {
         guiMode.waitingFirstClick &&
         !isEditingExistingSketch
       ) {
-        const curve = await engineCommandManager?.sendSceneCommand({
+        const curve = await engineCommandManager.sendSceneCommand({
          type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: {
@@ -326,7 +322,7 @@ export const Stream = ({ className = '' }) => {
         resp?.data?.data?.entities_modified?.length &&
         (!guiMode.waitingFirstClick || isEditingExistingSketch)
       ) {
-        const curve = await engineCommandManager?.sendSceneCommand({
+        const curve = await engineCommandManager.sendSceneCommand({
           type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: {
@@ -371,12 +367,12 @@ export const Stream = ({ className = '' }) => {
         setGuiMode({
           mode: 'default',
         })
-        engineCommandManager?.sendSceneCommand({
+        engineCommandManager.sendSceneCommand({
          type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: { type: 'edit_mode_exit' },
         })
-        engineCommandManager?.sendSceneCommand({
+        engineCommandManager.sendSceneCommand({
           type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: { type: 'default_camera_disable_sketch_mode' },

@@ -30,6 +30,7 @@ import { isOverlap, roundOff } from 'lib/utils'
 import { kclErrToDiagnostic } from 'lang/errors'
 import { CSSRuleObject } from 'tailwindcss/types/config'
 import interact from '@replit/codemirror-interact'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 export const editorShortcutMeta = {
   formatCode: {
@@ -52,7 +53,6 @@ export const TextEditor = ({
     code,
     deferredSetCode,
     editorView,
-    engineCommandManager,
     formatCode,
     isLSPServerReady,
     selectionRanges,
@@ -64,7 +64,6 @@ export const TextEditor = ({
     code: s.code,
     deferredSetCode: s.deferredSetCode,
     editorView: s.editorView,
-    engineCommandManager: s.engineCommandManager,
     formatCode: s.formatCode,
     isLSPServerReady: s.isLSPServerReady,
     selectionRanges: s.selectionRanges,
@@ -173,7 +172,7 @@ export const TextEditor = ({
     const idBasedSelections = codeBasedSelections
       .map(({ type, range }) => {
         const hasOverlap = Object.entries(
-          engineCommandManager?.sourceRangeMap || {}
+          engineCommandManager.sourceRangeMap || {}
         ).filter(([_, sourceRange]) => {
           return isOverlap(sourceRange, range)
         })
@@ -186,7 +185,7 @@ export const TextEditor = ({
       })
       .filter(Boolean) as any
 
-    engineCommandManager?.cusorsSelected({
+    engineCommandManager.cusorsSelected({
       otherSelections: [],
       idBasedSelections,
     })

@@ -133,7 +133,7 @@ export const SetAbsDistance = ({ buttonType }: { buttonType: ButtonType }) => {
           callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
         })
       } catch (e) {
-        console.log('e', e)
+        console.log('error', e)
       }
     }}
     disabled={!enableAngLen}

@@ -147,7 +147,7 @@ export const SetAngleLength = ({
           callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
         })
       } catch (e) {
-        console.log('e', e)
+        console.log('erorr', e)
       }
     }}
     disabled={!enableAngLen}

@@ -109,7 +109,6 @@ export default class Client extends jsrpc.JSONRPCServerAndClient {
         }
       }
       messageString += message
-      // console.log(messageString)
       return
     })
 
@@ -96,8 +96,6 @@ export class LanguageServerPlugin implements PluginValue {
   async sendChange({ documentText }: { documentText: string }) {
     if (!this.client.ready) return
 
-    console.log(documentText.length)
-
     if (documentText.length > 5000) {
       // Clear out the text it thinks we have, large documents will throw a stack error.
       // This is obviously not a good fix but it works for now til we figure

@@ -8,6 +8,7 @@ import { ArtifactMap, EngineCommandManager } from 'lang/std/engineConnection'
 import { Models } from '@kittycad/lib/dist/types/src'
 import { isReducedMotion } from 'lang/util'
 import { isOverlap } from 'lib/utils'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 interface DefaultPlanes {
   xy: string
@@ -17,19 +18,13 @@ interface DefaultPlanes {
 }
 
 export function useAppMode() {
-  const {
-    guiMode,
-    setGuiMode,
-    selectionRanges,
-    engineCommandManager,
-    selectionRangeTypeMap,
-  } = useStore((s) => ({
-    guiMode: s.guiMode,
-    setGuiMode: s.setGuiMode,
-    selectionRanges: s.selectionRanges,
-    engineCommandManager: s.engineCommandManager,
-    selectionRangeTypeMap: s.selectionRangeTypeMap,
-  }))
+  const { guiMode, setGuiMode, selectionRanges, selectionRangeTypeMap } =
+    useStore((s) => ({
+      guiMode: s.guiMode,
+      setGuiMode: s.setGuiMode,
+      selectionRanges: s.selectionRanges,
+      selectionRangeTypeMap: s.selectionRangeTypeMap,
+    }))
   const [defaultPlanes, setDefaultPlanes] = useState<DefaultPlanes | null>(null)
   useEffect(() => {
     if (
@@ -65,7 +60,7 @@ export function useAppMode() {
       setDefaultPlanesHidden(engineCommandManager, localDefaultPlanes, true)
       // TODO figure out the plane to use based on the sketch
       // maybe it's easier to make a new plane than rely on the defaults
-      await engineCommandManager?.sendSceneCommand({
+      await engineCommandManager.sendSceneCommand({
        type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: {
@@ -135,7 +130,7 @@ export function useAppMode() {
   ])
 
   useEffect(() => {
-    const unSub = engineCommandManager?.subscribeTo({
+    const unSub = engineCommandManager.subscribeTo({
       event: 'select_with_point',
       callback: async ({ data }) => {
         if (!data.entity_id) return
@@ -144,18 +139,16 @@ export function useAppMode() {
           // user clicked something else in the scene
           return
         }
-        const sketchModeResponse = await engineCommandManager?.sendSceneCommand(
-          {
-            type: 'modeling_cmd_req',
-            cmd_id: uuidv4(),
-            cmd: {
-              type: 'sketch_mode_enable',
-              plane_id: data.entity_id,
-              ortho: true,
-              animated: !isReducedMotion(),
-            },
-          }
-        )
+        const sketchModeResponse = await engineCommandManager.sendSceneCommand({
+          type: 'modeling_cmd_req',
+          cmd_id: uuidv4(),
+          cmd: {
+            type: 'sketch_mode_enable',
+            plane_id: data.entity_id,
+            ortho: true,
+            animated: !isReducedMotion(),
+          },
+        })
         setDefaultPlanesHidden(engineCommandManager, defaultPlanes, true)
         const sketchUuid = uuidv4()
         const proms: any[] = []
@@ -178,8 +171,7 @@ export function useAppMode() {
             },
           })
         )
-        const res = await Promise.all(proms)
-        console.log('res', res)
+        await Promise.all(proms)
         setGuiMode({
           mode: 'sketch',
           sketchMode: 'sketchEdit',
@@ -209,7 +201,7 @@ async function createPlane(
   }
 ) {
   const planeId = uuidv4()
-  await engineCommandManager?.sendSceneCommand({
+  await engineCommandManager.sendSceneCommand({
     type: 'modeling_cmd_req',
     cmd: {
       type: 'make_plane',
@@ -221,7 +213,7 @@ async function createPlane(
     },
     cmd_id: planeId,
   })
-  await engineCommandManager?.sendSceneCommand({
+  await engineCommandManager.sendSceneCommand({
     type: 'modeling_cmd_req',
     cmd: {
       type: 'plane_set_color',
@@ -234,12 +226,12 @@ async function createPlane(
 }
 
 function setDefaultPlanesHidden(
-  engineCommandManager: EngineCommandManager | undefined,
+  engineCommandManager: EngineCommandManager,
   defaultPlanes: DefaultPlanes,
   hidden: boolean
 ) {
   Object.values(defaultPlanes).forEach((planeId) => {
-    engineCommandManager?.sendSceneCommand({
+    engineCommandManager.sendSceneCommand({
       type: 'modeling_cmd_req',
       cmd_id: uuidv4(),
       cmd: {

@@ -1,14 +1,9 @@
 import { useEffect } from 'react'
 import { useStore } from 'useStore'
+import { engineCommandManager } from '../lang/std/engineConnection'
 
 export function useEngineConnectionSubscriptions() {
-  const {
-    engineCommandManager,
-    setCursor2,
-    setHighlightRange,
-    highlightRange,
-  } = useStore((s) => ({
-    engineCommandManager: s.engineCommandManager,
+  const { setCursor2, setHighlightRange, highlightRange } = useStore((s) => ({
     setCursor2: s.setCursor2,
     setHighlightRange: s.setHighlightRange,
     highlightRange: s.highlightRange,

@@ -1,53 +1,90 @@
-import { useLayoutEffect } from 'react'
+import { useLayoutEffect, useEffect, useRef } from 'react'
 import { _executor } from '../lang/executor'
 import { useStore } from '../useStore'
-import { EngineCommandManager } from '../lang/std/engineConnection'
+import { engineCommandManager } from '../lang/std/engineConnection'
+import { deferExecution } from 'lib/utils'
 
 export function useSetupEngineManager(
   streamRef: React.RefObject<HTMLDivElement>,
   token?: string
 ) {
   const {
-    setEngineCommandManager,
     setMediaStream,
     setIsStreamReady,
     setStreamDimensions,
     executeCode,
+    streamDimensions,
   } = useStore((s) => ({
-    setEngineCommandManager: s.setEngineCommandManager,
     setMediaStream: s.setMediaStream,
     setIsStreamReady: s.setIsStreamReady,
     setStreamDimensions: s.setStreamDimensions,
     executeCode: s.executeCode,
+    streamDimensions: s.streamDimensions,
   }))
 
   const streamWidth = streamRef?.current?.offsetWidth
   const streamHeight = streamRef?.current?.offsetHeight
 
+  const hasSetNonZeroDimensions = useRef<boolean>(false)
+
+  useLayoutEffect(() => {
+    // Load the engine command manager once with the initial width and height,
+    // then we do not want to reload it.
+    const { width: quadWidth, height: quadHeight } = getDimensions(
+      streamWidth,
+      streamHeight
+    )
+    if (!hasSetNonZeroDimensions.current && quadHeight && quadWidth) {
+      engineCommandManager.start({
+        setMediaStream,
+        setIsStreamReady,
+        width: quadWidth,
+        height: quadHeight,
+        token,
+      })
+      engineCommandManager.waitForReady.then(() => {
+        executeCode()
+      })
+      setStreamDimensions({
+        streamWidth: quadWidth,
+        streamHeight: quadHeight,
+      })
+      hasSetNonZeroDimensions.current = true
+    }
+  }, [streamRef?.current?.offsetWidth, streamRef?.current?.offsetHeight])
+
+  useEffect(() => {
+    const handleResize = deferExecution(() => {
+      const { width, height } = getDimensions(
+        streamRef?.current?.offsetWidth,
+        streamRef?.current?.offsetHeight
+      )
+      if (
+        streamDimensions.streamWidth !== width ||
+        streamDimensions.streamHeight !== height
+      ) {
+        engineCommandManager.handleResize({
+          streamWidth: width,
+          streamHeight: height,
+        })
+        setStreamDimensions({
+          streamWidth: width,
+          streamHeight: height,
+        })
+      }
+    }, 500)
+
+    window.addEventListener('resize', handleResize)
+    return () => {
+      window.removeEventListener('resize', handleResize)
+    }
+  }, [])
+}
+
+function getDimensions(streamWidth?: number, streamHeight?: number) {
   const width = streamWidth ? streamWidth : 0
   const quadWidth = Math.round(width / 4) * 4
   const height = streamHeight ? streamHeight : 0
   const quadHeight = Math.round(height / 4) * 4
 
-  useLayoutEffect(() => {
-    setStreamDimensions({
-      streamWidth: quadWidth,
-      streamHeight: quadHeight,
-    })
-    if (!width || !height) return
-    const eng = new EngineCommandManager({
-      setMediaStream,
-      setIsStreamReady,
-      width: quadWidth,
-      height: quadHeight,
-      token,
-    })
-    setEngineCommandManager(eng)
-    eng.waitForReady.then(() => {
-      executeCode()
-    })
-    return () => {
-      eng?.tearDown()
-    }
-  }, [quadWidth, quadHeight])
+  return { width: quadWidth, height: quadHeight }
 }

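The rewritten hook above splits engine startup from window resizing: dimensions are rounded to multiples of 4 before being sent, and resize events are deferred for 500 ms. A self-contained TypeScript sketch of that pattern follows; `deferExecution` here is a stand-in for the helper imported from `lib/utils` (whose implementation is not shown in this diff), and the element and log names are illustrative only.

```ts
// Round stream dimensions to multiples of 4, mirroring getDimensions above
// (video streaming backends commonly want 4-pixel-aligned frame sizes).
function getQuadDimensions(width = 0, height = 0) {
  return {
    width: Math.round(width / 4) * 4,
    height: Math.round(height / 4) * 4,
  }
}

// Stand-in for the repo's deferExecution helper: run `fn` only after
// `delay` ms have passed without another call (a trailing debounce).
function deferExecution<T>(fn: (arg: T) => void, delay: number) {
  let timer: ReturnType<typeof setTimeout> | undefined
  return (arg: T) => {
    if (timer !== undefined) clearTimeout(timer)
    timer = setTimeout(() => fn(arg), delay)
  }
}

// Usage sketch: only report a resize once the rounded size settles.
const onStreamResize = deferExecution((el: HTMLDivElement) => {
  const { width, height } = getQuadDimensions(el.offsetWidth, el.offsetHeight)
  console.log('stream should be reconfigured to', width, 'x', height)
}, 500)
```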
@@ -48,7 +48,7 @@ export function useConvertToVariable() {
 
       updateAst(_modifiedAst, true)
     } catch (e) {
-      console.log('e', e)
+      console.log('error', e)
     }
   }
 
@@ -1691,7 +1691,6 @@ describe('parsing errors', () => {
     let _theError
     try {
       const result = expect(parser_wasm(code))
-      console.log('result', result)
     } catch (e) {
       _theError = e
     }

@@ -7,7 +7,7 @@ export const recast = (ast: Program): string => {
     return s
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('recast', e)
+    console.log('recast error', e)
     throw e
   }
 }

@@ -595,7 +595,12 @@ export class EngineCommandManager {
     [localUnsubscribeId: string]: (a: any) => void
   }
   } = {} as any
-  constructor({
+
+  constructor() {
+    this.engineConnection = undefined
+  }
+
+  start({
     setMediaStream,
     setIsStreamReady,
     width,
@@ -608,6 +613,16 @@ export class EngineCommandManager {
     height: number
     token?: string
   }) {
+    if (width === 0 || height === 0) {
+      return
+    }
+
+    // If we already have an engine connection, just need to resize the stream.
+    if (this.engineConnection) {
+      this.handleResize({ streamWidth: width, streamHeight: height })
+      return
+    }
+
     this.waitForReady = new Promise((resolve) => {
       this.resolveReady = resolve
     })
@@ -689,7 +704,35 @@ export class EngineCommandManager {
 
     this.engineConnection?.connect()
   }
+  handleResize({
+    streamWidth,
+    streamHeight,
+  }: {
+    streamWidth: number
+    streamHeight: number
+  }) {
+    console.log('handleResize', streamWidth, streamHeight)
+    if (!this.engineConnection?.isReady()) {
+      return
+    }
+
+    const resizeCmd: EngineCommand = {
+      type: 'modeling_cmd_req',
+      cmd_id: uuidv4(),
+      cmd: {
+        type: 'reconfigure_stream',
+        width: streamWidth,
+        height: streamHeight,
+        fps: 60,
+      },
+    }
+    this.engineConnection?.send(resizeCmd)
+  }
   handleModelingCommand(message: WebSocketResponse, id: string) {
+    if (this.engineConnection === undefined) {
+      return
+    }
+
     if (message.type !== 'modeling') {
       return
     }
@@ -854,6 +897,9 @@ export class EngineCommandManager {
     })
   }
   sendSceneCommand(command: EngineCommand): Promise<any> {
+    if (this.engineConnection === undefined) {
+      return Promise.resolve()
+    }
     if (
       command.type === 'modeling_cmd_req' &&
       command.cmd.type !== lastMessage
@@ -905,6 +951,9 @@ export class EngineCommandManager {
     range: SourceRange
     command: EngineCommand | string
   }): Promise<any> {
+    if (this.engineConnection === undefined) {
+      return Promise.resolve()
+    }
     this.sourceRangeMap[id] = range
 
     if (!this.engineConnection?.isReady()) {
@@ -950,6 +999,9 @@ export class EngineCommandManager {
     rangeStr: string,
     commandStr: string
   ): Promise<any> {
+    if (this.engineConnection === undefined) {
+      return Promise.resolve()
+    }
     if (id === undefined) {
       throw new Error('id is undefined')
     }
@@ -1000,6 +1052,9 @@ export class EngineCommandManager {
     }
   }
   private async fixIdMappings(ast: Program, programMemory: ProgramMemory) {
+    if (this.engineConnection === undefined) {
+      return
+    }
     /* This is a temporary solution since the cmd_ids that are sent through when
        sending 'extend_path' ids are not used as the segment ids.
 
@@ -1079,3 +1134,5 @@ export class EngineCommandManager {
     })
   }
 }
+
+export const engineCommandManager = new EngineCommandManager()

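The hunks above turn EngineCommandManager from an instance created in React and held in the store into a module-level singleton: construction becomes trivial, connection setup moves to a new start() method, and the network-facing methods gain early returns when no engine connection exists. A minimal TypeScript sketch of that lifecycle shape; the method names come from the diff, but the class body here is stubs, not the real implementation.

```ts
// Shape of the new lifecycle, with stub bodies. Names (start, handleResize,
// sendSceneCommand, engineConnection) come from the hunks above; everything
// else is illustrative.
type StreamSize = { streamWidth: number; streamHeight: number }

class EngineCommandManagerSketch {
  private engineConnection?: { isReady(): boolean } = undefined
  waitForReady: Promise<void> = Promise.resolve()

  start(opts: { width: number; height: number; token?: string }) {
    // New guard: a zero-sized stream means there is nothing to start yet.
    if (opts.width === 0 || opts.height === 0) return
    // Already connected: a repeated start() degrades to a resize.
    if (this.engineConnection) {
      this.handleResize({ streamWidth: opts.width, streamHeight: opts.height })
      return
    }
    // ...the real implementation opens the engine connection here...
  }

  handleResize({ streamWidth, streamHeight }: StreamSize) {
    if (!this.engineConnection?.isReady()) return
    console.log('would send reconfigure_stream', streamWidth, streamHeight)
  }

  sendSceneCommand(command: unknown): Promise<unknown> {
    // Guard added across the diff: without a connection, commands are no-ops.
    if (this.engineConnection === undefined) return Promise.resolve()
    return Promise.resolve(command) // the real method forwards to the engine
  }
}

// Module-level singleton, matching the diff's final added line.
export const engineCommandManager = new EngineCommandManagerSketch()
```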
@@ -1279,7 +1279,7 @@ export function getTransformInfos(
     }) as TransformInfo[]
     return theTransforms
   } catch (error) {
-    console.log(error)
+    console.log('error', error)
     return []
   }
 }

@@ -11,7 +11,7 @@ export async function asyncLexer(str: string): Promise<Token[]> {
     return tokens
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('lexer', e)
+    console.log('lexer error', e)
     throw e
   }
 }
@@ -22,7 +22,7 @@ export function lexer(str: string): Token[] {
     return tokens
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('lexer', e)
+    console.log('lexer error', e)
     throw e
   }
 }

@@ -39,6 +39,6 @@ export async function exportSave(data: ArrayBuffer) {
     }
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('export', e)
+    console.log('export error', e)
   }
 }

@@ -36,7 +36,7 @@ export async function initializeProjectDirectory(directory: string) {
   try {
     docDirectory = await documentDir()
   } catch (e) {
-    console.log(e)
+    console.log('error', e)
     docDirectory = await homeDir() // seems to work better on Linux
   }
 
@@ -75,11 +75,12 @@ export async function executor(
   ast: Program,
   pm: ProgramMemory = { root: {}, return: null }
 ): Promise<ProgramMemory> {
-  const engineCommandManager = new EngineCommandManager({
+  const engineCommandManager = new EngineCommandManager()
+  engineCommandManager.start({
     setIsStreamReady: () => {},
     setMediaStream: () => {},
-    width: 100,
-    height: 100,
+    width: 0,
+    height: 0,
   })
   await engineCommandManager.waitForReady
   engineCommandManager.startNewSession()

@@ -19,6 +19,7 @@ import { KCLError } from './lang/errors'
 import { deferExecution } from 'lib/utils'
 import { _executor } from './lang/executor'
 import { bracket } from 'lib/exampleKcl'
+import { engineCommandManager } from './lang/std/engineConnection'
 
 export type Selection = {
   type: 'default' | 'line-end' | 'line-mid'
@@ -162,8 +163,6 @@ export interface StoreState {
   setProgramMemory: (programMemory: ProgramMemory) => void
   isShiftDown: boolean
   setIsShiftDown: (isShiftDown: boolean) => void
-  engineCommandManager?: EngineCommandManager
-  setEngineCommandManager: (engineCommandManager: EngineCommandManager) => void
   mediaStream?: MediaStream
   setMediaStream: (mediaStream: MediaStream) => void
   isStreamReady: boolean
@@ -226,7 +225,7 @@ export const useStore = create<StoreState>()(
       const result = await executeCode({
         code: code || get().code,
         lastAst: get().ast,
-        engineCommandManager: get().engineCommandManager,
+        engineCommandManager: engineCommandManager,
       })
       if (!result.isChange) {
         return
@@ -332,8 +331,6 @@ export const useStore = create<StoreState>()(
     executeAst: async (ast) => {
       const _ast = ast || get().ast
       if (!get().isStreamReady) return
-      const engineCommandManager = get().engineCommandManager!
-      if (!engineCommandManager) return
 
       set({ isExecuting: true })
       const { logs, errors, programMemory } = await executeAst({
@@ -350,8 +347,6 @@ export const useStore = create<StoreState>()(
     executeAstMock: async (ast) => {
       const _ast = ast || get().ast
       if (!get().isStreamReady) return
-      const engineCommandManager = get().engineCommandManager!
-      if (!engineCommandManager) return
 
       const { logs, errors, programMemory } = await executeAst({
         ast: _ast,
@@ -435,8 +430,6 @@ export const useStore = create<StoreState>()(
       setProgramMemory: (programMemory) => set({ programMemory }),
       isShiftDown: false,
       setIsShiftDown: (isShiftDown) => set({ isShiftDown }),
-      setEngineCommandManager: (engineCommandManager) =>
-        set({ engineCommandManager }),
       setMediaStream: (mediaStream) => set({ mediaStream }),
       isStreamReady: false,
       setIsStreamReady: (isStreamReady) => set({ isStreamReady }),
@@ -454,7 +447,9 @@ export const useStore = create<StoreState>()(
       fileId: '',
       setFileId: (fileId) => set({ fileId }),
       streamDimensions: { streamWidth: 1280, streamHeight: 720 },
-      setStreamDimensions: (streamDimensions) => set({ streamDimensions }),
+      setStreamDimensions: (streamDimensions) => {
+        set({ streamDimensions })
+      },
       isExecuting: false,
       setIsExecuting: (isExecuting) => set({ isExecuting }),
 
@@ -519,7 +514,7 @@ async function executeCode({
 }: {
   code: string
   lastAst: Program
-  engineCommandManager?: EngineCommandManager
+  engineCommandManager: EngineCommandManager
 }): Promise<
   | {
       logs: string[]
@@ -539,7 +534,7 @@ async function executeCode({
     if (e instanceof KCLError) {
       errors = [e]
       logs = []
-      if (e.msg === 'file is empty') engineCommandManager?.endSession()
+      if (e.msg === 'file is empty') engineCommandManager.endSession()
     }
     return {
       isChange: true,
@@ -562,7 +557,7 @@ async function executeCode({
   }
   // Check if the ast we have is equal to the ast in the storage.
   // If it is, we don't need to update the ast.
-  if (!engineCommandManager || JSON.stringify(ast) === JSON.stringify(lastAst))
+  if (JSON.stringify(ast) === JSON.stringify(lastAst))
     return { isChange: false }
 
   const { logs, errors, programMemory } = await executeAst({

src/wasm-lib/Cargo.lock (19 changes, generated)

@@ -1394,7 +1394,6 @@ dependencies = [
  "lazy_static",
  "parse-display",
  "pretty_assertions",
- "regex",
  "reqwest",
  "schemars",
  "serde",
@@ -1408,6 +1407,7 @@ dependencies = [
  "wasm-bindgen",
  "wasm-bindgen-futures",
  "web-sys",
+ "winnow",
 ]
 
 [[package]]
@@ -3081,9 +3081,9 @@ dependencies = [
 
 [[package]]
 name = "tokio-tungstenite"
-version = "0.20.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2"
+checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c"
 dependencies = [
  "futures-util",
  "log",
@@ -3303,9 +3303,9 @@ dependencies = [
 
 [[package]]
 name = "tungstenite"
-version = "0.20.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
+checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
 dependencies = [
  "byteorder",
  "bytes",
@@ -3792,6 +3792,15 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
+[[package]]
+name = "winnow"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "winreg"
 version = "0.50.0"

@@ -18,13 +18,13 @@ derive-docs = { path = "../derive-docs" }
 kittycad = { version = "0.2.25", default-features = false, features = ["js"] }
 lazy_static = "1.4.0"
 parse-display = "0.8.2"
-regex = "1.7.1"
 schemars = { version = "0.8", features = ["impl_json_schema", "url", "uuid1"] }
 serde = { version = "1.0.188", features = ["derive"] }
 serde_json = "1.0.107"
 thiserror = "1.0.48"
 ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "schemars-impl", "uuid-impl"] }
 uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
+winnow = "0.5.15"
 
 [target.'cfg(target_arch = "wasm32")'.dependencies]
 js-sys = { version = "0.3.64" }

@@ -1,24 +1,32 @@
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
 
-pub fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("parse + lex cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
-    c.bench_function("parse + lex big kitt", |b| {
-        b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/kittycad_svg.kcl")))
-    });
-    c.bench_function("parse + lex pipes_on_pipes", |b| {
-        b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl")))
-    });
+pub fn bench_lex(c: &mut Criterion) {
+    c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM)));
+    c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM)));
+    c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
 }
 
+pub fn bench_lex_parse(c: &mut Criterion) {
+    c.bench_function("parse_lex_cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
+    c.bench_function("parse_lex_big_kitt", |b| b.iter(|| lex_and_parse(KITT_PROGRAM)));
+    c.bench_function("parse_lex_pipes_on_pipes", |b| b.iter(|| lex_and_parse(PIPES_PROGRAM)));
+}
+
+fn lex(program: &str) {
+    black_box(kcl_lib::token::lexer(program));
+}
+
 fn lex_and_parse(program: &str) {
-    let tokens = kcl_lib::tokeniser::lexer(program);
+    let tokens = kcl_lib::token::lexer(program);
     let parser = kcl_lib::parser::Parser::new(tokens);
-    parser.ast().unwrap();
+    black_box(parser.ast().unwrap());
 }
 
-criterion_group!(benches, criterion_benchmark);
+criterion_group!(benches, bench_lex, bench_lex_parse);
 criterion_main!(benches);
 
+const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
+const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
 const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => {
   const sg = startSketchAt(pos)
     |> line([0, scale], %)

src/wasm-lib/kcl/fuzz/Cargo.lock (15 changes, generated)

@@ -709,7 +709,6 @@ dependencies = [
  "kittycad",
  "lazy_static",
  "parse-display",
- "regex",
  "reqwest",
  "schemars",
  "serde",
@@ -723,6 +722,7 @@ dependencies = [
  "wasm-bindgen",
  "wasm-bindgen-futures",
  "web-sys",
+ "winnow",
 ]
 
 [[package]]
@@ -1878,9 +1878,9 @@ dependencies = [
 
 [[package]]
 name = "tungstenite"
-version = "0.20.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
+checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
 dependencies = [
  "byteorder",
  "bytes",
@@ -2158,6 +2158,15 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
+[[package]]
+name = "winnow"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "winreg"
 version = "0.50.0"

@@ -166,7 +166,7 @@ pub async fn modify_ast_for_sketch(
     let recasted = program.recast(&FormatOptions::default(), 0);
 
     // Re-parse the ast so we get the correct source ranges.
-    let tokens = crate::tokeniser::lexer(&recasted);
+    let tokens = crate::token::lexer(&recasted);
     let parser = crate::parser::Parser::new(tokens);
     *program = parser.ast()?;
 
@@ -2691,7 +2691,7 @@ fn ghi = (x) => {
 }
 
 show(part001)"#;
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
         let symbols = program.get_lsp_symbols(code);
@@ -2719,7 +2719,7 @@ show(part001)
         let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
   |> line([0.4900857016, -0.0240763666], %)
   |> line([0.6804562304, 0.9087880491], %)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2738,7 +2738,7 @@ show(part001)
         let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
   |> line([0.4900857016, -0.0240763666], %) // hello world
   |> line([0.6804562304, 0.9087880491], %)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2757,7 +2757,7 @@ show(part001)
   |> line([0.4900857016, -0.0240763666], %)
   // hello world
   |> line([0.6804562304, 0.9087880491], %)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2783,7 +2783,7 @@ show(part001)
   // this is also a comment
   return things
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2820,7 +2820,7 @@ const mySk1 = startSketchAt([0, 0])
   |> ry(45, %)
   |> rx(45, %)
 // one more for good measure"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2859,7 +2859,7 @@ a comment between pipe expression statements */
   |> line([-0.42, -1.72], %)
 
 show(part001)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2885,7 +2885,7 @@ const yo = [
   " hey oooooo really long long long"
 ]
 "#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2903,7 +2903,7 @@ const key = 'c'
 const things = "things"
 
 // this is also a comment"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2921,7 +2921,7 @@ const things = "things"
       // a comment
   "
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2946,7 +2946,7 @@ const part001 = startSketchAt([0, 0])
     -angleToMatchLengthY('seg01', myVar, %),
     myVar
   ], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -2972,7 +2972,7 @@ const part001 = startSketchAt([0, 0])
     myVar
   ], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
 "#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -3003,7 +3003,7 @@ fn ghi = (part001) => {
 }
 
 show(part001)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let mut program = parser.ast().unwrap();
         program.rename_symbol("mySuperCoolPart", 6);
@@ -3034,7 +3034,7 @@ show(mySuperCoolPart)
         let some_program_string = r#"fn ghi = (x, y, z) => {
   return x
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let mut program = parser.ast().unwrap();
         program.rename_symbol("newName", 10);
@@ -3063,7 +3063,7 @@ const firstExtrude = startSketchAt([0,0])
   |> extrude(h, %)
 
 show(firstExtrude)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -3089,7 +3089,7 @@ show(firstExtrude)
     #[tokio::test(flavor = "multi_thread")]
     async fn test_recast_math_start_negative() {
         let some_program_string = r#"const myVar = -5 + 6"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -3105,7 +3105,7 @@ const FOS = 2
 const sigmaAllow = 8
 const width = 20
 const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
 
@@ -620,6 +620,22 @@ pub async fn execute(
                 let result = call_expr.execute(memory, &mut pipe_info, engine).await?;
                 args.push(result);
             }
+            Value::BinaryExpression(binary_expression) => {
+                let result = binary_expression.get_result(memory, &mut pipe_info, engine).await?;
+                args.push(result);
+            }
+            Value::UnaryExpression(unary_expression) => {
+                let result = unary_expression.get_result(memory, &mut pipe_info, engine).await?;
+                args.push(result);
+            }
+            Value::ObjectExpression(object_expression) => {
+                let result = object_expression.execute(memory, &mut pipe_info, engine).await?;
+                args.push(result);
+            }
+            Value::ArrayExpression(array_expression) => {
+                let result = array_expression.execute(memory, &mut pipe_info, engine).await?;
+                args.push(result);
+            }
             // We do nothing for the rest.
             _ => (),
         }
@@ -679,7 +695,7 @@ pub async fn execute(
                 message: format!(
                     "Expected {} arguments, got {}",
                     function_expression.params.len(),
-                    args.len()
+                    args.len(),
                 ),
                 source_ranges: vec![(&function_expression).into()],
             }));
@@ -804,7 +820,7 @@ mod tests {
     use super::*;
 
     pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast()?;
         let mut mem: ProgramMemory = Default::default();

@@ -9,4 +9,4 @@ pub mod math_parser;
 pub mod parser;
 pub mod server;
 pub mod std;
-pub mod tokeniser;
+pub mod token;

@@ -10,8 +10,8 @@ use crate::{
     },
     errors::{KclError, KclErrorDetails},
     executor::SourceRange,
-    parser::{is_not_code_token, Parser},
-    tokeniser::{Token, TokenType},
+    parser::Parser,
+    token::{Token, TokenType},
 };
 
 #[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
@@ -334,7 +334,7 @@ impl ReversePolishNotation {
             return rpn.parse();
         }
 
-        if is_not_code_token(current_token) {
+        if !current_token.is_code_token() {
             let rpn = ReversePolishNotation::new(&self.parser.tokens[1..], &self.previous_postfix, &self.operators);
             return rpn.parse();
         }
@@ -704,7 +704,7 @@ mod test {
 
     #[test]
     fn test_parse_expression() {
-        let tokens = crate::tokeniser::lexer("1 + 2");
+        let tokens = crate::token::lexer("1 + 2");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -731,7 +731,7 @@ mod test {
 
     #[test]
     fn test_parse_expression_add_no_spaces() {
-        let tokens = crate::tokeniser::lexer("1+2");
+        let tokens = crate::token::lexer("1+2");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -758,7 +758,7 @@ mod test {
 
     #[test]
     fn test_parse_expression_sub_no_spaces() {
-        let tokens = crate::tokeniser::lexer("1 -2");
+        let tokens = crate::token::lexer("1 -2");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -785,7 +785,7 @@ mod test {
 
     #[test]
     fn test_parse_expression_plus_followed_by_star() {
-        let tokens = crate::tokeniser::lexer("1 + 2 * 3");
+        let tokens = crate::token::lexer("1 + 2 * 3");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -823,7 +823,7 @@ mod test {
 
     #[test]
     fn test_parse_expression_with_parentheses() {
-        let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 )");
+        let tokens = crate::token::lexer("1 * ( 2 + 3 )");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -861,7 +861,7 @@ mod test {
 
     #[test]
     fn test_parse_expression_parens_in_middle() {
-        let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 ) / 4");
+        let tokens = crate::token::lexer("1 * ( 2 + 3 ) / 4");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -910,7 +910,7 @@ mod test {
 
     #[test]
     fn test_parse_expression_parans_and_predence() {
-        let tokens = crate::tokeniser::lexer("1 + ( 2 + 3 ) / 4");
+        let tokens = crate::token::lexer("1 + ( 2 + 3 ) / 4");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -958,7 +958,7 @@ mod test {
     }
     #[test]
     fn test_parse_expression_nested() {
-        let tokens = crate::tokeniser::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
+        let tokens = crate::token::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -1017,7 +1017,7 @@ mod test {
     }
     #[test]
     fn test_parse_expression_redundant_braces() {
-        let tokens = crate::tokeniser::lexer("1 * ((( 2 + 3 )))");
+        let tokens = crate::token::lexer("1 * ((( 2 + 3 )))");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -1055,7 +1055,7 @@ mod test {
 
     #[test]
     fn test_reverse_polish_notation_simple() {
-        let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2"), &[], &[]);
+        let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2"), &[], &[]);
         let result = parser.parse().unwrap();
         assert_eq!(
             result,
@@ -1084,7 +1084,7 @@ mod test {
 
     #[test]
     fn test_reverse_polish_notation_complex() {
-        let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2 * 3"), &[], &[]);
+        let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2 * 3"), &[], &[]);
         let result = parser.parse().unwrap();
         assert_eq!(
             result,
@@ -1125,7 +1125,7 @@ mod test {
 
     #[test]
     fn test_reverse_polish_notation_complex_with_parentheses() {
-        let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 * ( 2 + 3 )"), &[], &[]);
+        let parser = ReversePolishNotation::new(&crate::token::lexer("1 * ( 2 + 3 )"), &[], &[]);
         let result = parser.parse().unwrap();
         assert_eq!(
             result,
@@ -1179,7 +1179,7 @@ mod test {
     #[test]
     fn test_parse_expression_redundant_braces_around_literal() {
         let code = "2 + (((3)))";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -1274,7 +1274,7 @@ mod test {
     #[test]
     fn test_parse_expression_braces_around_lots_of_math() {
         let code = "(distance * p * FOS * 6 / (sigmaAllow * width))";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse();
         assert!(result.is_ok());
@@ -1283,7 +1283,7 @@ mod test {
     #[test]
     fn test_parse_expression_braces_around_internals_lots_of_math() {
         let code = "distance * p * FOS * 6 / (sigmaAllow * width)";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse();
         assert!(result.is_ok());

@@ -10,7 +10,7 @@ use crate::{
     },
     errors::{KclError, KclErrorDetails},
     math_parser::MathParser,
-    tokeniser::{Token, TokenType},
+    token::{Token, TokenType},
 };
 
 pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
@@ -249,7 +249,7 @@ impl Parser {
         }
 
         let current_token = self.get_token(index)?;
-        if is_not_code_token(current_token) {
+        if !current_token.is_code_token() {
             return self.find_end_of_non_code_node(index + 1);
         }
 
@@ -262,7 +262,7 @@ impl Parser {
         }
 
         let current_token = self.get_token(index)?;
-        if is_not_code_token(current_token) {
+        if !current_token.is_code_token() {
             return self.find_start_of_non_code_node(index - 1);
         }
 
@@ -365,7 +365,7 @@ impl Parser {
             });
         };
 
-        if is_not_code_token(token) {
+        if !token.is_code_token() {
             let non_code_node = self.make_non_code_node(new_index)?;
             let new_new_index = non_code_node.1 + 1;
             let bonus_non_code_node = non_code_node.0;
@@ -1623,7 +1623,7 @@ impl Parser {
             });
         }
 
-        if is_not_code_token(token) {
+        if !token.is_code_token() {
            let next_token = self.next_meaningful_token(token_index, Some(0))?;
            if let Some(node) = &next_token.non_code_node {
                if previous_body.is_empty() {
@@ -1788,12 +1788,6 @@ impl Parser {
     }
 }
 
-pub fn is_not_code_token(token: &Token) -> bool {
-    token.token_type == TokenType::Whitespace
-        || token.token_type == TokenType::LineComment
-        || token.token_type == TokenType::BlockComment
-}
-
 #[cfg(test)]
 mod tests {
     use pretty_assertions::assert_eq;
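The standalone `is_not_code_token` helper is deleted rather than moved: its logic now lives on `Token` itself in the new `token` module (shown in full later in this diff), with the polarity inverted so call sites read positively. For reference, the replacement method is:

    impl Token {
        /// A code token is anything that is not whitespace or a comment.
        pub fn is_code_token(&self) -> bool {
            !matches!(
                self.token_type,
                TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
            )
        }
    }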
@ -1803,7 +1797,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_make_identifier() {
|
||||
let tokens = crate::tokeniser::lexer("a");
|
||||
let tokens = crate::token::lexer("a");
|
||||
let parser = Parser::new(tokens);
|
||||
let identifier = parser.make_identifier(0).unwrap();
|
||||
assert_eq!(
|
||||
@ -1818,7 +1812,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_make_identifier_with_const_myvar_equals_5_and_index_2() {
|
||||
let tokens = crate::tokeniser::lexer("const myVar = 5");
|
||||
let tokens = crate::token::lexer("const myVar = 5");
|
||||
let parser = Parser::new(tokens);
|
||||
let identifier = parser.make_identifier(2).unwrap();
|
||||
assert_eq!(
|
||||
@ -1833,7 +1827,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_make_identifier_multiline() {
|
||||
let tokens = crate::tokeniser::lexer("const myVar = 5\nconst newVar = myVar + 1");
|
||||
let tokens = crate::token::lexer("const myVar = 5\nconst newVar = myVar + 1");
|
||||
let parser = Parser::new(tokens);
|
||||
let identifier = parser.make_identifier(2).unwrap();
|
||||
assert_eq!(
|
||||
@ -1857,7 +1851,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_make_identifier_call_expression() {
|
||||
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
|
||||
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
|
||||
let parser = Parser::new(tokens);
|
||||
let identifier = parser.make_identifier(0).unwrap();
|
||||
assert_eq!(
|
||||
@ -1880,7 +1874,7 @@ mod tests {
|
||||
}
|
||||
#[test]
|
||||
fn test_make_non_code_node() {
|
||||
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
|
||||
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
|
||||
let parser = Parser::new(tokens);
|
||||
let index = 4;
|
||||
let expected_output = (None, 4);
|
||||
@ -1889,7 +1883,7 @@ mod tests {
|
||||
let index = 7;
|
||||
let expected_output = (None, 7);
|
||||
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"
|
||||
const yo = { a: { b: { c: '123' } } }
|
||||
// this is a comment
|
||||
@ -1920,7 +1914,7 @@ const key = 'c'"#,
|
||||
31,
|
||||
);
|
||||
assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const mySketch = startSketchAt([0,0])
|
||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||
|> lineTo([1, 1], %) /* this is
|
||||
@ -1946,7 +1940,7 @@ const key = 'c'"#,
|
||||
|
||||
#[test]
|
||||
fn test_collect_object_keys() {
|
||||
let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
|
||||
let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
|
||||
let parser = Parser::new(tokens);
|
||||
let keys_info = parser.collect_object_keys(6, None, false).unwrap();
|
||||
assert_eq!(keys_info.len(), 2);
|
||||
@ -1966,7 +1960,7 @@ const key = 'c'"#,
|
||||
|
||||
#[test]
|
||||
fn test_make_literal_call_expression() {
|
||||
let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
|
||||
let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
|
||||
let parser = Parser::new(tokens);
|
||||
let literal = parser.make_literal(2).unwrap();
|
||||
assert_eq!(
|
||||
@ -1990,74 +1984,88 @@ const key = 'c'"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_code_token() {
|
||||
let tokens = [
|
||||
Token {
|
||||
token_type: TokenType::Word,
|
||||
start: 0,
|
||||
end: 3,
|
||||
value: "log".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::Brace,
|
||||
start: 3,
|
||||
end: 4,
|
||||
value: "(".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::Number,
|
||||
start: 4,
|
||||
end: 5,
|
||||
value: "5".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::Comma,
|
||||
start: 5,
|
||||
end: 6,
|
||||
value: ",".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::String,
|
||||
start: 7,
|
||||
end: 14,
|
||||
value: "\"hello\"".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::Word,
|
||||
start: 16,
|
||||
end: 27,
|
||||
value: "aIdentifier".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::Brace,
|
||||
start: 27,
|
||||
end: 28,
|
||||
value: ")".to_string(),
|
||||
},
|
||||
];
|
||||
for (i, token) in tokens.iter().enumerate() {
|
||||
assert!(token.is_code_token(), "failed test {i}: {token:?}")
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_not_code_token() {
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::Word,
|
||||
start: 0,
|
||||
end: 3,
|
||||
value: "log".to_string(),
|
||||
}));
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::Brace,
|
||||
start: 3,
|
||||
end: 4,
|
||||
value: "(".to_string(),
|
||||
}));
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::Number,
|
||||
start: 4,
|
||||
end: 5,
|
||||
value: "5".to_string(),
|
||||
}));
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::Comma,
|
||||
start: 5,
|
||||
end: 6,
|
||||
value: ",".to_string(),
|
||||
}));
|
||||
assert!(is_not_code_token(&Token {
|
||||
token_type: TokenType::Whitespace,
|
||||
start: 6,
|
||||
end: 7,
|
||||
value: " ".to_string(),
|
||||
}));
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::String,
|
||||
start: 7,
|
||||
end: 14,
|
||||
value: "\"hello\"".to_string(),
|
||||
}));
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::Word,
|
||||
start: 16,
|
||||
end: 27,
|
||||
value: "aIdentifier".to_string(),
|
||||
}));
|
||||
assert!(!is_not_code_token(&Token {
|
||||
token_type: TokenType::Brace,
|
||||
start: 27,
|
||||
end: 28,
|
||||
value: ")".to_string(),
|
||||
}));
|
||||
assert!(is_not_code_token(&Token {
|
||||
token_type: TokenType::BlockComment,
|
||||
start: 28,
|
||||
end: 30,
|
||||
value: "/* abte */".to_string(),
|
||||
}));
|
||||
assert!(is_not_code_token(&Token {
|
||||
token_type: TokenType::LineComment,
|
||||
start: 30,
|
||||
end: 33,
|
||||
value: "// yoyo a line".to_string(),
|
||||
}));
|
||||
let tokens = [
|
||||
Token {
|
||||
token_type: TokenType::Whitespace,
|
||||
start: 6,
|
||||
end: 7,
|
||||
value: " ".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::BlockComment,
|
||||
start: 28,
|
||||
end: 30,
|
||||
value: "/* abte */".to_string(),
|
||||
},
|
||||
Token {
|
||||
token_type: TokenType::LineComment,
|
||||
start: 30,
|
||||
end: 33,
|
||||
value: "// yoyo a line".to_string(),
|
||||
},
|
||||
];
|
||||
for (i, token) in tokens.iter().enumerate() {
|
||||
assert!(!token.is_code_token(), "failed test {i}: {token:?}")
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_next_meaningful_token() {
|
||||
let _offset = 1;
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const mySketch = startSketchAt([0,0])
|
||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||
|> lineTo([1, 1], %) /* this is
|
||||
@ -2443,7 +2451,7 @@ const key = 'c'"#,
|
||||
|
||||
#[test]
|
||||
fn test_find_closing_brace() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const mySketch = startSketchAt([0,0])
|
||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||
|> lineTo([1, 1], %) /* this is
|
||||
@ -2460,16 +2468,16 @@ const key = 'c'"#,
|
||||
assert_eq!(parser.find_closing_brace(90, 0, "").unwrap(), 92);
|
||||
|
||||
let basic = "( hey )";
|
||||
let parser = Parser::new(crate::tokeniser::lexer(basic));
|
||||
let parser = Parser::new(crate::token::lexer(basic));
|
||||
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 4);
|
||||
|
||||
let handles_non_zero_index = "(indexForBracketToRightOfThisIsTwo(shouldBeFour)AndNotThisSix)";
|
||||
let parser = Parser::new(crate::tokeniser::lexer(handles_non_zero_index));
|
||||
let parser = Parser::new(crate::token::lexer(handles_non_zero_index));
|
||||
assert_eq!(parser.find_closing_brace(2, 0, "").unwrap(), 4);
|
||||
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 6);
|
||||
|
||||
let handles_nested = "{a{b{c(}d]}eathou athoeu tah u} thatOneToTheLeftIsLast }";
|
||||
let parser = Parser::new(crate::tokeniser::lexer(handles_nested));
|
||||
let parser = Parser::new(crate::token::lexer(handles_nested));
|
||||
assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 18);
|
||||
|
||||
// TODO expect error when not started on a brace
|
||||
@ -2477,7 +2485,7 @@ const key = 'c'"#,
|
||||
|
||||
#[test]
|
||||
fn test_is_call_expression() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const mySketch = startSketchAt([0,0])
|
||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||
|> lineTo([1, 1], %) /* this is
|
||||
@ -2498,7 +2506,7 @@ const key = 'c'"#,
|
||||
|
||||
#[test]
|
||||
fn test_find_next_declaration_keyword() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const mySketch = startSketchAt([0,0])
|
||||
|> lineTo({ to: [0, 1], tag: 'myPath' }, %)
|
||||
|> lineTo([1, 1], %) /* this is
|
||||
@ -2513,7 +2521,7 @@ const key = 'c'"#,
|
||||
TokenReturn { token: None, index: 92 }
|
||||
);
|
||||
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const myVar = 5
|
||||
const newVar = myVar + 1
|
||||
"#,
|
||||
@ -2543,7 +2551,7 @@ const newVar = myVar + 1
|
||||
lineTo(2, 3)
|
||||
} |> rx(45, %)
|
||||
"#;
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens);
|
||||
assert_eq!(
|
||||
parser.has_pipe_operator(0, None).unwrap(),
|
||||
@ -2562,7 +2570,7 @@ const newVar = myVar + 1
|
||||
lineTo(2, 3)
|
||||
} |> rx(45, %) |> rx(45, %)
|
||||
"#;
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens);
|
||||
assert_eq!(
|
||||
parser.has_pipe_operator(0, None).unwrap(),
|
||||
@ -2584,7 +2592,7 @@ const newVar = myVar + 1
|
||||
const yo = myFunc(9()
|
||||
|> rx(45, %)
|
||||
"#;
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens);
|
||||
assert_eq!(
|
||||
parser.has_pipe_operator(0, None).unwrap(),
|
||||
@ -2596,7 +2604,7 @@ const yo = myFunc(9()
|
||||
);
|
||||
|
||||
let code = "const myVar2 = 5 + 1 |> myFn(%)";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens);
|
||||
assert_eq!(
|
||||
parser.has_pipe_operator(1, None).unwrap(),
|
||||
@ -2618,7 +2626,7 @@ const yo = myFunc(9()
|
||||
lineTo(1,1)
|
||||
} |> rx(90, %)
|
||||
show(mySk1)"#;
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let token_with_my_path_index = tokens.iter().position(|token| token.value == "myPath").unwrap();
|
||||
// loop through getting the token and it's index
|
||||
@ -2658,7 +2666,7 @@ show(mySk1)"#;
|
||||
|
||||
#[test]
|
||||
fn test_make_member_expression() {
|
||||
let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
|
||||
let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
|
||||
let parser = Parser::new(tokens);
|
||||
let member_expression_return = parser.make_member_expression(6).unwrap();
|
||||
let member_expression = member_expression_return.expression;
|
||||
@ -2700,63 +2708,63 @@ show(mySk1)"#;
|
||||
#[test]
|
||||
fn test_find_end_of_binary_expression() {
|
||||
let code = "1 + 2 * 3\nconst yo = 5";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].value, "3");
|
||||
|
||||
let code = "(1 + 25) / 5 - 3\nconst yo = 5";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].value, "3");
|
||||
let index_of_5 = code.find('5').unwrap();
|
||||
let end_starting_at_the_5 = parser.find_end_of_binary_expression(index_of_5).unwrap();
|
||||
assert_eq!(end_starting_at_the_5, end);
|
||||
// whole thing wraped
|
||||
// whole thing wrapped
|
||||
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].end, code.find("3)").unwrap() + 2);
|
||||
// whole thing wraped but given index after the first brace
|
||||
// whole thing wrapped but given index after the first brace
|
||||
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(1).unwrap();
|
||||
assert_eq!(tokens[end].value, "3");
|
||||
// given the index of a small wrapped section i.e. `1 + 2` in ((1 + 2) / 5 - 3)'
|
||||
let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(2).unwrap();
|
||||
assert_eq!(tokens[end].value, "2");
|
||||
// lots of silly nesting
|
||||
let code = "(1 + 2) / (5 - (3))\nconst yo = 5";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
|
||||
// with pipe operator at the end
|
||||
let code = "(1 + 2) / (5 - (3))\n |> fn(%)";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);
|
||||
// with call expression at the start of binary expression
|
||||
let code = "yo(2) + 3\n |> fn(%)";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].value, "3");
|
||||
// with call expression at the end of binary expression
|
||||
let code = "3 + yo(2)\n |> fn(%)";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens);
|
||||
let _end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
// with call expression at the end of binary expression
|
||||
let code = "-legX + 2, ";
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens.clone());
|
||||
let end = parser.find_end_of_binary_expression(0).unwrap();
|
||||
assert_eq!(tokens[end].value, "2");
|
||||
@ -2765,7 +2773,7 @@ show(mySk1)"#;
|
||||
#[test]
|
||||
fn test_make_array_expression() {
|
||||
// input_index: 6, output_index: 14, output: {"type":"ArrayExpression","start":11,"end":26,"elements":[{"type":"Literal","start":12,"end":15,"value":"1","raw":"\"1\""},{"type":"Literal","start":17,"end":18,"value":2,"raw":"2"},{"type":"Identifier","start":20,"end":25,"name":"three"}]}
|
||||
let tokens = crate::tokeniser::lexer("const yo = [\"1\", 2, three]");
|
||||
let tokens = crate::token::lexer("const yo = [\"1\", 2, three]");
|
||||
let parser = Parser::new(tokens);
|
||||
let array_expression = parser.make_array_expression(6).unwrap();
|
||||
let expression = array_expression.expression;
|
||||
@ -2804,7 +2812,7 @@ show(mySk1)"#;
|
||||
|
||||
#[test]
|
||||
fn test_make_call_expression() {
|
||||
let tokens = crate::tokeniser::lexer("foo(\"a\", a, 3)");
|
||||
let tokens = crate::token::lexer("foo(\"a\", a, 3)");
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.make_call_expression(0).unwrap();
|
||||
assert_eq!(result.last_index, 9);
|
||||
@ -2838,7 +2846,7 @@ show(mySk1)"#;
|
||||
|
||||
#[test]
|
||||
fn test_make_variable_declaration() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const yo = startSketch([0, 0])
|
||||
|> lineTo([1, myVar], %)
|
||||
|> foo(myVar2, %)
|
||||
@ -2908,7 +2916,7 @@ show(mySk1)"#;
|
||||
|
||||
#[test]
|
||||
fn test_make_body() {
|
||||
let tokens = crate::tokeniser::lexer("const myVar = 5");
|
||||
let tokens = crate::token::lexer("const myVar = 5");
|
||||
let parser = Parser::new(tokens);
|
||||
let body = parser
|
||||
.make_body(
|
||||
@ -2926,7 +2934,7 @@ show(mySk1)"#;
|
||||
#[test]
|
||||
fn test_abstract_syntax_tree() {
|
||||
let code = "5 +6";
|
||||
let parser = Parser::new(crate::tokeniser::lexer(code));
|
||||
let parser = Parser::new(crate::token::lexer(code));
|
||||
let result = parser.ast().unwrap();
|
||||
let expected_result = Program {
|
||||
start: 0,
|
||||
@ -2964,8 +2972,8 @@ show(mySk1)"#;
|
||||
#[test]
|
||||
fn test_empty_file() {
|
||||
let some_program_string = r#""#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert!(result.err().unwrap().to_string().contains("file is empty"));
|
||||
@ -2973,7 +2981,7 @@ show(mySk1)"#;
|
||||
|
||||
#[test]
|
||||
fn test_parse_half_pipe_small() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
"const secondExtrude = startSketchAt([0,0])
|
||||
|",
|
||||
);
|
||||
@ -2985,14 +2993,14 @@ show(mySk1)"#;
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_double_nested_braces() {
|
||||
let tokens = crate::tokeniser::lexer(r#"const prop = yo["one"][two]"#);
|
||||
let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#);
|
||||
let parser = Parser::new(tokens);
|
||||
parser.ast().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_binary_expression_period_number_first() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const obj = { a: 1, b: 2 }
|
||||
const height = 1 - obj.a"#,
|
||||
);
|
||||
@ -3002,7 +3010,7 @@ const height = 1 - obj.a"#,
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_binary_expression_brace_number_first() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const obj = { a: 1, b: 2 }
|
||||
const height = 1 - obj["a"]"#,
|
||||
);
|
||||
@ -3012,7 +3020,7 @@ const height = 1 - obj["a"]"#,
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_binary_expression_brace_number_second() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const obj = { a: 1, b: 2 }
|
||||
const height = obj["a"] - 1"#,
|
||||
);
|
||||
@ -3022,7 +3030,7 @@ const height = obj["a"] - 1"#,
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_binary_expression_in_array_number_first() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const obj = { a: 1, b: 2 }
|
||||
const height = [1 - obj["a"], 0]"#,
|
||||
);
|
||||
@ -3032,7 +3040,7 @@ const height = [1 - obj["a"], 0]"#,
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_binary_expression_in_array_number_second() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const obj = { a: 1, b: 2 }
|
||||
const height = [obj["a"] - 1, 0]"#,
|
||||
);
|
||||
@ -3042,7 +3050,7 @@ const height = [obj["a"] - 1, 0]"#,
|
||||
|
||||
#[test]
|
||||
fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"const obj = { a: 1, b: 2 }
|
||||
const height = [obj["a"] -1, 0]"#,
|
||||
);
|
||||
@ -3052,7 +3060,7 @@ const height = [obj["a"] -1, 0]"#,
|
||||
|
||||
#[test]
|
||||
fn test_parse_half_pipe() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
"const height = 10
|
||||
|
||||
const firstExtrude = startSketchAt([0,0])
|
||||
@@ -3075,15 +3083,17 @@ const secondExtrude = startSketchAt([0,0])
 
     #[test]
     fn test_parse_greater_bang() {
-        let tokens = crate::tokeniser::lexer(">!");
+        let tokens = crate::token::lexer(">!");
         let parser = Parser::new(tokens);
-        let result = parser.ast();
-        assert!(result.is_ok());
+        let err = parser.ast().unwrap_err();
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
+        assert!(err.to_string().contains("file is empty"));
     }
 
     #[test]
     fn test_parse_z_percent_parens() {
-        let tokens = crate::tokeniser::lexer("z%)");
+        let tokens = crate::token::lexer("z%)");
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
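The `test_parse_greater_bang` change above falls out of the new token module's error handling: `crate::token::lexer` (see `token.rs` below) maps any tokenizer failure to an empty token list, and the parser reports an empty token stream as "file is empty". A minimal sketch of that flow, assuming `">!"` is a tokenizer error as the updated test implies:

    let tokens = crate::token::lexer(">!");   // tokenize failure -> unwrap_or_default() -> vec![]
    assert!(tokens.is_empty());
    let err = crate::parser::Parser::new(tokens).ast().unwrap_err();
    assert!(err.to_string().contains("file is empty"));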
@@ -3092,15 +3102,17 @@ const secondExtrude = startSketchAt([0,0])
 
     #[test]
     fn test_parse_parens_unicode() {
-        let tokens = crate::tokeniser::lexer("(ޜ");
+        let tokens = crate::token::lexer("(ޜ");
         let parser = Parser::new(tokens);
         let result = parser.ast();
-        assert!(result.is_ok());
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
+        assert!(result.is_err());
     }
 
     #[test]
     fn test_parse_negative_in_array_binary_expression() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const leg1 = 5
 const thickness = 0.56
 
@ -3114,7 +3126,7 @@ const bracket = [-leg2 + thickness, 0]
|
||||
|
||||
#[test]
|
||||
fn test_parse_nested_open_brackets() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"
|
||||
z(-[["#,
|
||||
);
|
||||
@@ -3129,31 +3141,38 @@ z(-[["#,
 
     #[test]
     fn test_parse_weird_new_line_function() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"z
 (--#"#,
         );
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
         assert_eq!(
             result.err().unwrap().to_string(),
-            r#"syntax: KclErrorDetails { source_ranges: [SourceRange([0, 1])], message: "missing a closing brace for the function call" }"#
+            r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
         );
     }
 
     #[test]
     fn test_parse_weird_lots_of_fancy_brackets() {
-        let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
+        let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
-        assert!(result.err().unwrap().to_string().contains("unexpected end"));
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
+        );
     }
 
     #[test]
     fn test_parse_weird_close_before_open() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"fn)n
 e
 ["#,
|
||||
@ -3170,7 +3189,7 @@ e
|
||||
|
||||
#[test]
|
||||
fn test_parse_weird_close_before_nada() {
|
||||
let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
|
||||
let tokens = crate::token::lexer(r#"fn)n-"#);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
@ -3179,7 +3198,7 @@ e
|
||||
|
||||
#[test]
|
||||
fn test_parse_weird_lots_of_slashes() {
|
||||
let tokens = crate::tokeniser::lexer(
|
||||
let tokens = crate::token::lexer(
|
||||
r#"J///////////o//+///////////P++++*++++++P///////˟
|
||||
++4"#,
|
||||
);
|
||||
@ -3196,7 +3215,7 @@ e
|
||||
#[test]
|
||||
fn test_parse_expand_array() {
|
||||
let code = "const myArray = [0..10]";
|
||||
let parser = Parser::new(crate::tokeniser::lexer(code));
|
||||
let parser = Parser::new(crate::token::lexer(code));
|
||||
let result = parser.ast().unwrap();
|
||||
let expected_result = Program {
|
||||
start: 0,
|
||||
@ -3299,8 +3318,8 @@ e
|
||||
#[test]
|
||||
fn test_error_keyword_in_variable() {
|
||||
let some_program_string = r#"const let = "thing""#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3312,8 +3331,8 @@ e
|
||||
#[test]
|
||||
fn test_error_keyword_in_fn_name() {
|
||||
let some_program_string = r#"fn let = () {}"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3325,8 +3344,8 @@ e
|
||||
#[test]
|
||||
fn test_error_stdlib_in_fn_name() {
|
||||
let some_program_string = r#"fn cos = () {}"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3340,8 +3359,8 @@ e
|
||||
let some_program_string = r#"fn thing = (let) => {
|
||||
return 1
|
||||
}"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3355,8 +3374,8 @@ e
|
||||
let some_program_string = r#"fn thing = (cos) => {
|
||||
return 1
|
||||
}"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3373,8 +3392,8 @@ e
|
||||
}
|
||||
firstPrimeNumber()
|
||||
"#;
|
||||
let tokens = crate::tokeniser::lexer(program);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(program);
|
||||
let parser = Parser::new(tokens);
|
||||
let _ast = parser.ast().unwrap();
|
||||
}
|
||||
|
||||
@ -3386,8 +3405,8 @@ e
|
||||
|
||||
thing(false)
|
||||
"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
parser.ast().unwrap();
|
||||
}
|
||||
|
||||
@ -3403,8 +3422,8 @@ thing(false)
|
||||
"#,
|
||||
name
|
||||
);
|
||||
let tokens = crate::tokeniser::lexer(&some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(&some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3421,8 +3440,8 @@ thing(false)
|
||||
#[test]
|
||||
fn test_error_define_var_as_function() {
|
||||
let some_program_string = r#"fn thing = "thing""#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
let result = parser.ast();
|
||||
assert!(result.is_err());
|
||||
assert_eq!(
|
||||
@ -3450,8 +3469,8 @@ const pt2 = b2[0]
|
||||
|
||||
show(b1)
|
||||
show(b2)"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
parser.ast().unwrap();
|
||||
}
|
||||
|
||||
@ -3459,18 +3478,36 @@ show(b2)"#;
|
||||
fn test_math_with_stdlib() {
|
||||
let some_program_string = r#"const d2r = pi() / 2
|
||||
let other_thing = 2 * cos(3)"#;
|
||||
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
parser.ast().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[ignore] // ignore until more stack fixes
|
||||
fn test_parse_pipes_on_pipes() {
|
||||
let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
|
||||
|
||||
let tokens = crate::tokeniser::lexer(code);
|
||||
let parser = crate::parser::Parser::new(tokens);
|
||||
let tokens = crate::token::lexer(code);
|
||||
let parser = Parser::new(tokens);
|
||||
parser.ast().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_arguments() {
|
||||
let some_program_string = r#"fn box = (p, h, l, w) => {
|
||||
const myBox = startSketchAt(p)
|
||||
|> line([0, l], %)
|
||||
|> line([w, 0], %)
|
||||
|> line([0, -l], %)
|
||||
|> close(%)
|
||||
|> extrude(h, %)
|
||||
|
||||
return myBox
|
||||
}
|
||||
let myBox = box([0,0], -3, -16, -10)
|
||||
show(myBox)"#;
|
||||
let tokens = crate::token::lexer(some_program_string);
|
||||
let parser = Parser::new(tokens);
|
||||
parser.ast().unwrap();
|
||||
}
|
||||
}
|
||||
|
@@ -34,7 +34,7 @@ pub struct Backend {
     /// The types of tokens the server supports.
     pub token_types: Vec<SemanticTokenType>,
     /// Token maps.
-    pub token_map: DashMap<String, Vec<crate::tokeniser::Token>>,
+    pub token_map: DashMap<String, Vec<crate::token::Token>>,
     /// AST maps.
     pub ast_map: DashMap<String, crate::ast::types::Program>,
     /// Current code.
@@ -56,7 +56,7 @@ impl Backend {
         // Lets update the tokens.
         self.current_code_map
             .insert(params.uri.to_string(), params.text.clone());
-        let tokens = crate::tokeniser::lexer(&params.text);
+        let tokens = crate::token::lexer(&params.text);
         self.token_map.insert(params.uri.to_string(), tokens.clone());
 
         // Update the semantic tokens map.
@@ -69,9 +69,7 @@ impl Backend {
                 continue;
             };
 
-            if token.token_type == crate::tokeniser::TokenType::Word
-                && self.stdlib_completions.contains_key(&token.value)
-            {
+            if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
                 // This is a stdlib function.
                 token_type = SemanticTokenType::FUNCTION;
             }
@@ -549,7 +547,7 @@ impl LanguageServer for Backend {
         // Parse the ast.
         // I don't know if we need to do this again since it should be updated in the context.
         // But I figure better safe than sorry since this will write back out to the file.
-        let tokens = crate::tokeniser::lexer(&current_code);
+        let tokens = crate::token::lexer(&current_code);
         let parser = crate::parser::Parser::new(tokens);
         let Ok(ast) = parser.ast() else {
             return Ok(None);
@@ -581,7 +579,7 @@ impl LanguageServer for Backend {
         // Parse the ast.
         // I don't know if we need to do this again since it should be updated in the context.
         // But I figure better safe than sorry since this will write back out to the file.
-        let tokens = crate::tokeniser::lexer(&current_code);
+        let tokens = crate::token::lexer(&current_code);
         let parser = crate::parser::Parser::new(tokens);
         let Ok(mut ast) = parser.ast() else {
             return Ok(None);
@@ -856,26 +856,6 @@ async fn inner_arc(data: ArcData, sketch_group: Box<SketchGroup>, args: Args) ->
     )
     .await?;
 
-    // TODO: Dont do this (move path pen) - mike
-    // lets review what the needs are here and see if any existing arc endpoints can accomplish this
-
-    // Move the path pen to the end of the arc.
-    // Since that is where we want to draw the next path.
-    // TODO: the engine should automatically move the pen to the end of the arc.
-    // This just seems inefficient.
-    args.send_modeling_cmd(
-        id,
-        ModelingCmd::MovePathPen {
-            path: sketch_group.id,
-            to: Point3D {
-                x: end.x,
-                y: end.y,
-                z: 0.0,
-            },
-        },
-    )
-    .await?;
-
     let current_path = Path::ToPoint {
         base: BasePath {
             from: from.into(),
src/wasm-lib/kcl/src/token.rs (new file, 173 lines)
@@ -0,0 +1,173 @@
use std::str::FromStr;

use anyhow::Result;
use parse_display::{Display, FromStr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;

mod tokeniser;

/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
#[display(style = "camelCase")]
pub enum TokenType {
    /// A number.
    Number,
    /// A word.
    Word,
    /// An operator.
    Operator,
    /// A string.
    String,
    /// A keyword.
    Keyword,
    /// A brace.
    Brace,
    /// Whitespace.
    Whitespace,
    /// A comma.
    Comma,
    /// A colon.
    Colon,
    /// A period.
    Period,
    /// A double period: `..`.
    DoublePeriod,
    /// A line comment.
    LineComment,
    /// A block comment.
    BlockComment,
    /// A function name.
    Function,
}
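`TokenType` derives `FromStr` and `Display` in camelCase to match the serde rename, so the string and enum forms round-trip; `all_semantic_token_types` below depends on exactly that when it parses variant names back out of the JSON schema. A small sketch, assuming the derives behave as declared:

    use std::str::FromStr;

    assert_eq!(TokenType::LineComment.to_string(), "lineComment");
    assert_eq!(TokenType::from_str("lineComment").unwrap(), TokenType::LineComment);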

/// Most KCL tokens correspond to LSP semantic tokens (but not all).
impl TryFrom<TokenType> for SemanticTokenType {
    type Error = anyhow::Error;
    fn try_from(token_type: TokenType) -> Result<Self> {
        Ok(match token_type {
            TokenType::Number => Self::NUMBER,
            TokenType::Word => Self::VARIABLE,
            TokenType::Keyword => Self::KEYWORD,
            TokenType::Operator => Self::OPERATOR,
            TokenType::String => Self::STRING,
            TokenType::LineComment => Self::COMMENT,
            TokenType::BlockComment => Self::COMMENT,
            TokenType::Function => Self::FUNCTION,
            TokenType::Whitespace
            | TokenType::Brace
            | TokenType::Comma
            | TokenType::Colon
            | TokenType::Period
            | TokenType::DoublePeriod => {
                anyhow::bail!("unsupported token type: {:?}", token_type)
            }
        })
    }
}
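A quick usage sketch of the conversion; punctuation-like token types are deliberately rejected since LSP defines no semantic class for them:

    assert_eq!(
        SemanticTokenType::try_from(TokenType::Number).unwrap(),
        SemanticTokenType::NUMBER
    );
    assert!(SemanticTokenType::try_from(TokenType::Comma).is_err());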

impl TokenType {
    // This is for the lsp server.
    pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
        let mut settings = schemars::gen::SchemaSettings::openapi3();
        settings.inline_subschemas = true;
        let mut generator = schemars::gen::SchemaGenerator::new(settings);

        let schema = TokenType::json_schema(&mut generator);
        let schemars::schema::Schema::Object(o) = &schema else {
            anyhow::bail!("expected object schema: {:#?}", schema);
        };
        let Some(subschemas) = &o.subschemas else {
            anyhow::bail!("expected subschemas: {:#?}", schema);
        };
        let Some(one_ofs) = &subschemas.one_of else {
            anyhow::bail!("expected one_of: {:#?}", schema);
        };

        let mut semantic_tokens = vec![];
        for one_of in one_ofs {
            let schemars::schema::Schema::Object(o) = one_of else {
                anyhow::bail!("expected object one_of: {:#?}", one_of);
            };

            let Some(enum_values) = o.enum_values.as_ref() else {
                anyhow::bail!("expected enum values: {:#?}", o);
            };

            if enum_values.len() > 1 {
                anyhow::bail!("expected only one enum value: {:#?}", o);
            }

            if enum_values.is_empty() {
                anyhow::bail!("expected at least one enum value: {:#?}", o);
            }

            let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
            if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
                semantic_tokens.push(semantic_token_type);
            }
        }

        Ok(semantic_tokens)
    }
}
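In plain terms: the function walks `TokenType`'s own JSON schema (one `oneOf` entry per variant), parses each camelCase label back into a `TokenType`, and keeps the ones that convert to LSP semantic token types. The server presumably feeds the result into `Backend::token_types` (the field shown in the lsp.rs hunk above); the wiring below is a sketch of that assumption:

    // Collected once at startup: NUMBER, VARIABLE, KEYWORD, OPERATOR, STRING, COMMENT, FUNCTION.
    let token_types = TokenType::all_semantic_token_types()?;
    assert!(token_types.contains(&SemanticTokenType::KEYWORD));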

#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
#[ts(export)]
pub struct Token {
    #[serde(rename = "type")]
    pub token_type: TokenType,
    /// Offset in the source code where this token begins.
    pub start: usize,
    /// Offset in the source code where this token ends.
    pub end: usize,
    pub value: String,
}

impl Token {
    pub fn from_range(range: std::ops::Range<usize>, token_type: TokenType, value: String) -> Self {
        Self {
            start: range.start,
            end: range.end,
            value,
            token_type,
        }
    }
    pub fn is_code_token(&self) -> bool {
        !matches!(
            self.token_type,
            TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
        )
    }
}
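A brief usage sketch of the two new methods (values chosen for illustration):

    let word = Token::from_range(0..5, TokenType::Word, "hello".to_string());
    assert_eq!((word.start, word.end), (0, 5));
    assert!(word.is_code_token());

    let space = Token::from_range(5..6, TokenType::Whitespace, " ".to_string());
    assert!(!space.is_code_token());   // whitespace and comments are the only non-code tokens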

impl From<Token> for crate::executor::SourceRange {
    fn from(token: Token) -> Self {
        Self([token.start, token.end])
    }
}

impl From<&Token> for crate::executor::SourceRange {
    fn from(token: &Token) -> Self {
        Self([token.start, token.end])
    }
}

pub fn lexer(s: &str) -> Vec<Token> {
    tokeniser::lexer(s).unwrap_or_default()
}
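The behavioral shift in this refactor hides in that two-line wrapper: the relocated tokeniser (in `token/tokeniser.rs`, diff suppressed below) is fallible, and `unwrap_or_default()` collapses any tokenizer error into an empty `Vec<Token>`, which the parser then reports as "file is empty". That is why several weird-input parser tests above traded their old syntax errors for that message. A sketch of the contract, assuming the private module's lexer returns a `Result` as this call implies:

    assert!(!lexer("const x = 1").is_empty());   // well-formed input tokenizes normally
    assert!(lexer(">!").is_empty());             // assumption: a tokenize error, per the updated tests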

#[cfg(test)]
mod tests {
    use super::*;

    // We have this as a test so we can ensure it never panics with an unwrap in the server.
    #[test]
    fn test_token_type_to_semantic_token_type() {
        let semantic_types = TokenType::all_semantic_token_types().unwrap();
        assert!(!semantic_types.is_empty());
    }
}
src/wasm-lib/kcl/src/token/tokeniser.rs (new file, 1464 lines)
File diff suppressed because it is too large
@@ -1,749 +0,0 @@
use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
use lazy_static::lazy_static;
|
||||
use parse_display::{Display, FromStr};
|
||||
use regex::bytes::Regex;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower_lsp::lsp_types::SemanticTokenType;
|
||||
|
||||
/// The types of tokens.
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
|
||||
#[ts(export)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[display(style = "camelCase")]
|
||||
pub enum TokenType {
|
||||
/// A number.
|
||||
Number,
|
||||
/// A word.
|
||||
Word,
|
||||
/// An operator.
|
||||
Operator,
|
||||
/// A string.
|
||||
String,
|
||||
/// A keyword.
|
||||
Keyword,
|
||||
/// A brace.
|
||||
Brace,
|
||||
/// Whitespace.
|
||||
Whitespace,
|
||||
/// A comma.
|
||||
Comma,
|
||||
/// A colon.
|
||||
Colon,
|
||||
/// A period.
|
||||
Period,
|
||||
/// A double period: `..`.
|
||||
DoublePeriod,
|
||||
/// A line comment.
|
||||
LineComment,
|
||||
/// A block comment.
|
||||
BlockComment,
|
||||
/// A function name.
|
||||
Function,
|
||||
}
|
||||
|
||||
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
|
||||
impl TryFrom<TokenType> for SemanticTokenType {
|
||||
type Error = anyhow::Error;
|
||||
fn try_from(token_type: TokenType) -> Result<Self> {
|
||||
Ok(match token_type {
|
||||
TokenType::Number => Self::NUMBER,
|
||||
TokenType::Word => Self::VARIABLE,
|
||||
TokenType::Keyword => Self::KEYWORD,
|
||||
TokenType::Operator => Self::OPERATOR,
|
||||
TokenType::String => Self::STRING,
|
||||
TokenType::LineComment => Self::COMMENT,
|
||||
TokenType::BlockComment => Self::COMMENT,
|
||||
TokenType::Function => Self::FUNCTION,
|
||||
TokenType::Whitespace
|
||||
| TokenType::Brace
|
||||
| TokenType::Comma
|
||||
| TokenType::Colon
|
||||
| TokenType::Period
|
||||
| TokenType::DoublePeriod => {
|
||||
anyhow::bail!("unsupported token type: {:?}", token_type)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TokenType {
|
||||
// This is for the lsp server.
|
||||
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
|
||||
let mut settings = schemars::gen::SchemaSettings::openapi3();
|
||||
settings.inline_subschemas = true;
|
||||
let mut generator = schemars::gen::SchemaGenerator::new(settings);
|
||||
|
||||
let schema = TokenType::json_schema(&mut generator);
|
||||
let schemars::schema::Schema::Object(o) = &schema else {
|
||||
anyhow::bail!("expected object schema: {:#?}", schema);
|
||||
};
|
||||
let Some(subschemas) = &o.subschemas else {
|
||||
anyhow::bail!("expected subschemas: {:#?}", schema);
|
||||
};
|
||||
let Some(one_ofs) = &subschemas.one_of else {
|
||||
anyhow::bail!("expected one_of: {:#?}", schema);
|
||||
};
|
||||
|
||||
let mut semantic_tokens = vec![];
|
||||
for one_of in one_ofs {
|
||||
let schemars::schema::Schema::Object(o) = one_of else {
|
||||
anyhow::bail!("expected object one_of: {:#?}", one_of);
|
||||
};
|
||||
|
||||
let Some(enum_values) = o.enum_values.as_ref() else {
|
||||
anyhow::bail!("expected enum values: {:#?}", o);
|
||||
};
|
||||
|
||||
if enum_values.len() > 1 {
|
||||
anyhow::bail!("expected only one enum value: {:#?}", o);
|
||||
}
|
||||
|
||||
if enum_values.is_empty() {
|
||||
anyhow::bail!("expected at least one enum value: {:#?}", o);
|
||||
}
|
||||
|
||||
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
|
||||
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
|
||||
semantic_tokens.push(semantic_token_type);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(semantic_tokens)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
|
||||
#[ts(export)]
|
||||
pub struct Token {
|
||||
#[serde(rename = "type")]
|
||||
pub token_type: TokenType,
|
||||
/// Offset in the source code where this token begins.
|
||||
pub start: usize,
|
||||
/// Offset in the source code where this token ends.
|
||||
pub end: usize,
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
impl From<Token> for crate::executor::SourceRange {
|
||||
fn from(token: Token) -> Self {
|
||||
Self([token.start, token.end])
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Token> for crate::executor::SourceRange {
|
||||
fn from(token: &Token) -> Self {
|
||||
Self([token.start, token.end])
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
    static ref NUMBER: Regex = Regex::new(r"^(\d+(\.\d*)?|\.\d+)\b").unwrap();
    static ref WHITESPACE: Regex = Regex::new(r"\s+").unwrap();
    static ref WORD: Regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*").unwrap();
    // TODO: these should be generated using our struct types for these.
    static ref KEYWORD: Regex =
        Regex::new(r"^(if|else|for|while|return|break|continue|fn|let|mut|loop|true|false|nil|and|or|not|var|const)\b").unwrap();
    static ref OPERATOR: Regex = Regex::new(r"^(>=|<=|==|=>|!= |\|>|\*|\+|-|/|%|=|<|>|\||\^)").unwrap();
    static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
    static ref BLOCK_START: Regex = Regex::new(r"^\{").unwrap();
    static ref BLOCK_END: Regex = Regex::new(r"^\}").unwrap();
    static ref PARAN_START: Regex = Regex::new(r"^\(").unwrap();
    static ref PARAN_END: Regex = Regex::new(r"^\)").unwrap();
    static ref ARRAY_START: Regex = Regex::new(r"^\[").unwrap();
    static ref ARRAY_END: Regex = Regex::new(r"^\]").unwrap();
    static ref COMMA: Regex = Regex::new(r"^,").unwrap();
    static ref COLON: Regex = Regex::new(r"^:").unwrap();
    static ref PERIOD: Regex = Regex::new(r"^\.").unwrap();
    static ref DOUBLE_PERIOD: Regex = Regex::new(r"^\.\.").unwrap();
    static ref LINECOMMENT: Regex = Regex::new(r"^//.*").unwrap();
    static ref BLOCKCOMMENT: Regex = Regex::new(r"^/\*[\s\S]*?\*/").unwrap();
}
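Two details of this outgoing table are worth noting: every pattern is anchored with `^` except WHITESPACE, so `is_whitespace` matches anywhere in its input (the tests below rely on this, e.g. `is_whitespace("a ")` holds), and `return_token_at_index` has to try DOUBLE_PERIOD before PERIOD because `.` is a prefix of `..`. A tiny illustration of the anchoring difference:

    use regex::bytes::Regex;

    let whitespace = Regex::new(r"\s+").unwrap();   // unanchored, as above
    let period = Regex::new(r"^\.").unwrap();       // anchored
    assert!(whitespace.is_match(b"a "));            // matches mid-input
    assert!(!period.is_match(b"a."));               // only matches at offset 0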
|
||||
|
||||
fn is_number(character: &[u8]) -> bool {
|
||||
NUMBER.is_match(character)
|
||||
}
|
||||
fn is_whitespace(character: &[u8]) -> bool {
|
||||
WHITESPACE.is_match(character)
|
||||
}
|
||||
fn is_word(character: &[u8]) -> bool {
|
||||
WORD.is_match(character)
|
||||
}
|
||||
fn is_keyword(character: &[u8]) -> bool {
|
||||
KEYWORD.is_match(character)
|
||||
}
|
||||
fn is_string(character: &[u8]) -> bool {
|
||||
match STRING.find(character) {
|
||||
Some(m) => m.start() == 0,
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
fn is_operator(character: &[u8]) -> bool {
|
||||
OPERATOR.is_match(character)
|
||||
}
|
||||
fn is_block_start(character: &[u8]) -> bool {
|
||||
BLOCK_START.is_match(character)
|
||||
}
|
||||
fn is_block_end(character: &[u8]) -> bool {
|
||||
BLOCK_END.is_match(character)
|
||||
}
|
||||
fn is_paren_start(character: &[u8]) -> bool {
|
||||
PARAN_START.is_match(character)
|
||||
}
|
||||
fn is_paren_end(character: &[u8]) -> bool {
|
||||
PARAN_END.is_match(character)
|
||||
}
|
||||
fn is_array_start(character: &[u8]) -> bool {
|
||||
ARRAY_START.is_match(character)
|
||||
}
|
||||
fn is_array_end(character: &[u8]) -> bool {
|
||||
ARRAY_END.is_match(character)
|
||||
}
|
||||
fn is_comma(character: &[u8]) -> bool {
|
||||
COMMA.is_match(character)
|
||||
}
|
||||
fn is_colon(character: &[u8]) -> bool {
|
||||
COLON.is_match(character)
|
||||
}
|
||||
fn is_double_period(character: &[u8]) -> bool {
|
||||
DOUBLE_PERIOD.is_match(character)
|
||||
}
|
||||
fn is_period(character: &[u8]) -> bool {
|
||||
PERIOD.is_match(character)
|
||||
}
|
||||
fn is_line_comment(character: &[u8]) -> bool {
|
||||
LINECOMMENT.is_match(character)
|
||||
}
|
||||
fn is_block_comment(character: &[u8]) -> bool {
|
||||
BLOCKCOMMENT.is_match(character)
|
||||
}
|
||||
|
||||
fn match_first(s: &[u8], regex: &Regex) -> Option<String> {
|
||||
regex
|
||||
.find(s)
|
||||
.map(|the_match| String::from_utf8_lossy(the_match.as_bytes()).into())
|
||||
}
|
||||
|
||||
fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
|
||||
Token {
|
||||
token_type,
|
||||
value: value.to_string(),
|
||||
start,
|
||||
end: start + value.len(),
|
||||
}
|
||||
}
|
||||
|
||||
fn return_token_at_index(str_from_index: &[u8], start_index: usize) -> Option<Token> {
|
||||
if is_string(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::String,
|
||||
&match_first(str_from_index, &STRING)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
let is_line_comment_bool = is_line_comment(str_from_index);
|
||||
if is_line_comment_bool || is_block_comment(str_from_index) {
|
||||
return Some(make_token(
|
||||
if is_line_comment_bool {
|
||||
TokenType::LineComment
|
||||
} else {
|
||||
TokenType::BlockComment
|
||||
},
|
||||
&match_first(
|
||||
str_from_index,
|
||||
if is_line_comment_bool {
|
||||
&LINECOMMENT
|
||||
} else {
|
||||
&BLOCKCOMMENT
|
||||
},
|
||||
)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_paren_end(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Brace,
|
||||
&match_first(str_from_index, &PARAN_END)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_paren_start(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Brace,
|
||||
&match_first(str_from_index, &PARAN_START)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_block_start(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Brace,
|
||||
&match_first(str_from_index, &BLOCK_START)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_block_end(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Brace,
|
||||
&match_first(str_from_index, &BLOCK_END)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_array_start(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Brace,
|
||||
&match_first(str_from_index, &ARRAY_START)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_array_end(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Brace,
|
||||
&match_first(str_from_index, &ARRAY_END)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_comma(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Comma,
|
||||
&match_first(str_from_index, &COMMA)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_operator(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Operator,
|
||||
&match_first(str_from_index, &OPERATOR)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_number(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Number,
|
||||
&match_first(str_from_index, &NUMBER)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_keyword(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Keyword,
|
||||
&match_first(str_from_index, &KEYWORD)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_word(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Word,
|
||||
&match_first(str_from_index, &WORD)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_colon(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Colon,
|
||||
&match_first(str_from_index, &COLON)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_double_period(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::DoublePeriod,
|
||||
&match_first(str_from_index, &DOUBLE_PERIOD)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_period(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Period,
|
||||
&match_first(str_from_index, &PERIOD)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
if is_whitespace(str_from_index) {
|
||||
return Some(make_token(
|
||||
TokenType::Whitespace,
|
||||
&match_first(str_from_index, &WHITESPACE)?,
|
||||
start_index,
|
||||
));
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn lexer(s: &str) -> Vec<Token> {
    let mut current_index = 0;
    let mut tokens = Vec::new();
    let n = s.len();
    let b = s.as_bytes();
    while current_index < n {
        let token = return_token_at_index(&b[current_index..], current_index);
        let Some(token) = token else {
            current_index += 1;
            continue;
        };
        let token_length = token.value.len();
        tokens.push(token);
        current_index += token_length;
    }
    tokens
}
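The subtlety being deleted with this loop: when no pattern matched, it silently advanced one byte and carried on, so unlexable input yielded a partial token stream instead of an error (e.g. `lexer(">!")` returned just the `>` operator token and dropped the `!`). The replacement tokeniser signals failure instead, which `token::lexer` then flattens to an empty vector; the updated parser tests above pin down that new contract:

    // Post-change behavior (assumption, inferred from the updated tests):
    let tokens = crate::token::lexer(">!");
    assert!(tokens.is_empty());   // tokenizer error collapsed to an empty stream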

#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;

    #[test]
    fn is_number_test() {
        assert!(is_number("1".as_bytes()));
        assert!(is_number("1 abc".as_bytes()));
        assert!(is_number("1.1".as_bytes()));
        assert!(is_number("1.1 abc".as_bytes()));
        assert!(!is_number("a".as_bytes()));

        assert!(is_number("1".as_bytes()));
        assert!(is_number(".1".as_bytes()));
        assert!(is_number("5?".as_bytes()));
        assert!(is_number("5 + 6".as_bytes()));
        assert!(is_number("5 + a".as_bytes()));
        assert!(is_number("5.5".as_bytes()));

        assert!(!is_number("1abc".as_bytes()));
        assert!(!is_number("a".as_bytes()));
        assert!(!is_number("?".as_bytes()));
        assert!(!is_number("?5".as_bytes()));
    }

    #[test]
    fn is_whitespace_test() {
        assert!(is_whitespace(" ".as_bytes()));
        assert!(is_whitespace("  ".as_bytes()));
        assert!(is_whitespace(" a".as_bytes()));
        assert!(is_whitespace("a ".as_bytes()));

        assert!(!is_whitespace("a".as_bytes()));
        assert!(!is_whitespace("?".as_bytes()));
    }

    #[test]
    fn is_word_test() {
        assert!(is_word("a".as_bytes()));
        assert!(is_word("a ".as_bytes()));
        assert!(is_word("a5".as_bytes()));
        assert!(is_word("a5a".as_bytes()));

        assert!(!is_word("5".as_bytes()));
        assert!(!is_word("5a".as_bytes()));
        assert!(!is_word("5a5".as_bytes()));
    }

    #[test]
    fn is_string_test() {
        assert!(is_string("\"\"".as_bytes()));
        assert!(is_string("\"a\"".as_bytes()));
        assert!(is_string("\"a\" ".as_bytes()));
        assert!(is_string("\"a\"5".as_bytes()));
        assert!(is_string("'a'5".as_bytes()));
        assert!(is_string("\"with escaped \\\" backslash\"".as_bytes()));

        assert!(!is_string("\"".as_bytes()));
        assert!(!is_string("\"a".as_bytes()));
        assert!(!is_string("a\"".as_bytes()));
        assert!(!is_string(" \"a\"".as_bytes()));
        assert!(!is_string("5\"a\"".as_bytes()));
        assert!(!is_string("a + 'str'".as_bytes()));
        assert!(is_string("'c'".as_bytes()));
    }

    #[test]
    fn is_operator_test() {
        assert!(is_operator("+".as_bytes()));
        assert!(is_operator("+ ".as_bytes()));
        assert!(is_operator("-".as_bytes()));
        assert!(is_operator("<=".as_bytes()));
        assert!(is_operator("<= ".as_bytes()));
        assert!(is_operator(">=".as_bytes()));
        assert!(is_operator(">= ".as_bytes()));
        assert!(is_operator("> ".as_bytes()));
        assert!(is_operator("< ".as_bytes()));
        assert!(is_operator("| ".as_bytes()));
        assert!(is_operator("|> ".as_bytes()));
        assert!(is_operator("^ ".as_bytes()));
        assert!(is_operator("% ".as_bytes()));
        assert!(is_operator("+* ".as_bytes()));

        assert!(!is_operator("5 + 5".as_bytes()));
        assert!(!is_operator("a".as_bytes()));
        assert!(!is_operator("a+".as_bytes()));
        assert!(!is_operator("a+5".as_bytes()));
        assert!(!is_operator("5a+5".as_bytes()));
        assert!(!is_operator(", newVar".as_bytes()));
        assert!(!is_operator(",".as_bytes()));
    }

    #[test]
    fn is_block_start_test() {
        assert!(is_block_start("{".as_bytes()));
        assert!(is_block_start("{ ".as_bytes()));
        assert!(is_block_start("{5".as_bytes()));
        assert!(is_block_start("{a".as_bytes()));
        assert!(is_block_start("{5 ".as_bytes()));

        assert!(!is_block_start("5".as_bytes()));
        assert!(!is_block_start("5 + 5".as_bytes()));
        assert!(!is_block_start("5{ + 5".as_bytes()));
        assert!(!is_block_start("a{ + 5".as_bytes()));
        assert!(!is_block_start(" { + 5".as_bytes()));
    }

    #[test]
    fn is_block_end_test() {
        assert!(is_block_end("}".as_bytes()));
        assert!(is_block_end("} ".as_bytes()));
        assert!(is_block_end("}5".as_bytes()));
        assert!(is_block_end("}5 ".as_bytes()));

        assert!(!is_block_end("5".as_bytes()));
        assert!(!is_block_end("5 + 5".as_bytes()));
        assert!(!is_block_end("5} + 5".as_bytes()));
        assert!(!is_block_end(" } + 5".as_bytes()));
    }

    #[test]
    fn is_paren_start_test() {
        assert!(is_paren_start("(".as_bytes()));
        assert!(is_paren_start("( ".as_bytes()));
        assert!(is_paren_start("(5".as_bytes()));
        assert!(is_paren_start("(5 ".as_bytes()));
        assert!(is_paren_start("(5 + 5".as_bytes()));
        assert!(is_paren_start("(5 + 5)".as_bytes()));
        assert!(is_paren_start("(5 + 5) ".as_bytes()));

        assert!(!is_paren_start("5".as_bytes()));
        assert!(!is_paren_start("5 + 5".as_bytes()));
        assert!(!is_paren_start("5( + 5)".as_bytes()));
        assert!(!is_paren_start(" ( + 5)".as_bytes()));
    }

    #[test]
    fn is_paren_end_test() {
        assert!(is_paren_end(")".as_bytes()));
        assert!(is_paren_end(") ".as_bytes()));
        assert!(is_paren_end(")5".as_bytes()));
        assert!(is_paren_end(")5 ".as_bytes()));

        assert!(!is_paren_end("5".as_bytes()));
        assert!(!is_paren_end("5 + 5".as_bytes()));
        assert!(!is_paren_end("5) + 5".as_bytes()));
        assert!(!is_paren_end(" ) + 5".as_bytes()));
    }

    #[test]
    fn is_comma_test() {
        assert!(is_comma(",".as_bytes()));
        assert!(is_comma(", ".as_bytes()));
        assert!(is_comma(",5".as_bytes()));
        assert!(is_comma(",5 ".as_bytes()));

        assert!(!is_comma("5".as_bytes()));
        assert!(!is_comma("5 + 5".as_bytes()));
        assert!(!is_comma("5, + 5".as_bytes()));
        assert!(!is_comma(" , + 5".as_bytes()));
    }

    #[test]
    fn is_line_comment_test() {
        assert!(is_line_comment("//".as_bytes()));
        assert!(is_line_comment("// ".as_bytes()));
        assert!(is_line_comment("//5".as_bytes()));
        assert!(is_line_comment("//5 ".as_bytes()));

        assert!(!is_line_comment("5".as_bytes()));
        assert!(!is_line_comment("5 + 5".as_bytes()));
        assert!(!is_line_comment("5// + 5".as_bytes()));
        assert!(!is_line_comment(" // + 5".as_bytes()));
    }

    #[test]
    fn is_block_comment_test() {
        assert!(is_block_comment("/* */".as_bytes()));
        assert!(is_block_comment("/***/".as_bytes()));
        assert!(is_block_comment("/*5*/".as_bytes()));
        assert!(is_block_comment("/*5 */".as_bytes()));

        assert!(!is_block_comment("/*".as_bytes()));
        assert!(!is_block_comment("5".as_bytes()));
        assert!(!is_block_comment("5 + 5".as_bytes()));
        assert!(!is_block_comment("5/* + 5".as_bytes()));
        assert!(!is_block_comment(" /* + 5".as_bytes()));
        assert!(!is_block_comment(
            r#" /* and
            here
            */
            "#
            .as_bytes()
        ));
    }

    #[test]
    fn make_token_test() {
        assert_eq!(
            make_token(TokenType::Keyword, "const", 56),
            Token {
                token_type: TokenType::Keyword,
                value: "const".to_string(),
                start: 56,
                end: 61,
            }
        );
    }

    #[test]
    fn return_token_at_index_test() {
        assert_eq!(
            return_token_at_index("const".as_bytes(), 0),
            Some(Token {
                token_type: TokenType::Keyword,
                value: "const".to_string(),
                start: 0,
                end: 5,
            })
        );
        assert_eq!(
            return_token_at_index("4554".as_bytes(), 2),
            Some(Token {
                token_type: TokenType::Number,
                value: "4554".to_string(),
                start: 2,
                end: 6,
            })
        );
    }

    #[test]
    fn lexer_test() {
        assert_eq!(
            lexer("const a=5"),
            vec![
                Token {
                    token_type: TokenType::Keyword,
                    value: "const".to_string(),
                    start: 0,
                    end: 5,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 5,
                    end: 6,
                },
                Token {
                    token_type: TokenType::Word,
                    value: "a".to_string(),
                    start: 6,
                    end: 7,
                },
                Token {
                    token_type: TokenType::Operator,
                    value: "=".to_string(),
                    start: 7,
                    end: 8,
                },
                Token {
                    token_type: TokenType::Number,
                    value: "5".to_string(),
                    start: 8,
                    end: 9,
                },
            ]
        );
        assert_eq!(
            lexer("54 + 22500 + 6"),
            vec![
                Token {
                    token_type: TokenType::Number,
                    value: "54".to_string(),
                    start: 0,
                    end: 2,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 2,
                    end: 3,
                },
                Token {
                    token_type: TokenType::Operator,
                    value: "+".to_string(),
                    start: 3,
                    end: 4,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 4,
                    end: 5,
                },
                Token {
                    token_type: TokenType::Number,
                    value: "22500".to_string(),
                    start: 5,
                    end: 10,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 10,
                    end: 11,
                },
                Token {
                    token_type: TokenType::Operator,
                    value: "+".to_string(),
                    start: 11,
                    end: 12,
                },
                Token {
                    token_type: TokenType::Whitespace,
                    value: " ".to_string(),
                    start: 12,
                    end: 13,
                },
                Token {
                    token_type: TokenType::Number,
                    value: "6".to_string(),
                    start: 13,
                    end: 14,
                },
            ]
        );
    }

    // We have this as a test so we can ensure it never panics with an unwrap in the server.
    #[test]
    fn test_token_type_to_semantic_token_type() {
        let semantic_types = TokenType::all_semantic_token_types().unwrap();
        assert!(!semantic_types.is_empty());
    }

    #[test]
    fn test_lexer_negative_word() {
        assert_eq!(
            lexer("-legX"),
            vec![
                Token {
                    token_type: TokenType::Operator,
                    value: "-".to_string(),
                    start: 0,
                    end: 1,
                },
                Token {
                    token_type: TokenType::Word,
                    value: "legX".to_string(),
                    start: 1,
                    end: 5,
                },
            ]
        );
    }
}
@@ -84,13 +84,13 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {

// test for this function and by extension lexer are done in javascript land src/lang/tokeniser.test.ts
#[wasm_bindgen]
pub fn lexer_js(js: &str) -> Result<JsValue, JsError> {
    let tokens = kcl_lib::tokeniser::lexer(js);
    let tokens = kcl_lib::token::lexer(js);
    Ok(JsValue::from_serde(&tokens)?)
}

#[wasm_bindgen]
pub fn parse_js(js: &str) -> Result<JsValue, String> {
    let tokens = kcl_lib::tokeniser::lexer(js);
    let tokens = kcl_lib::token::lexer(js);
    let parser = kcl_lib::parser::Parser::new(tokens);
    let program = parser.ast().map_err(String::from)?;
    // The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
@@ -149,7 +149,7 @@ pub async fn lsp_run(config: ServerConfig) -> Result<(), JsValue> {
    let stdlib_signatures = get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
    // We can unwrap here because we know the tokeniser is valid, since
    // we have a test for it.
    let token_types = kcl_lib::tokeniser::TokenType::all_semantic_token_types().unwrap();
    let token_types = kcl_lib::token::TokenType::all_semantic_token_types().unwrap();

    let (service, socket) = LspService::new(|client| Backend {
        client,
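Every call site in this diff follows the same three steps after the `tokeniser` to `token` module rename: lex, construct a parser, take the AST. A hedged sketch of that shape for reference (the wrapper function and its signature are illustrative, not API from the diff):

    fn parse_source(source: &str) -> Result<(), String> {
        // Tokenise with the renamed module, then hand the tokens to the parser.
        let tokens = kcl_lib::token::lexer(source);
        let parser = kcl_lib::parser::Parser::new(tokens);
        // ast() yields the program; this sketch discards it.
        let _program = parser.ast().map_err(String::from)?;
        Ok(())
    }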
@@ -306,5 +306,5 @@ const svg = startSketchAt([0, 0])
  |> lineTo([13.44, -10.92], %) // HorizontalLineRelative
  |> lineTo([13.44, -13.44], %) // VerticalLineHorizonal
  |> lineTo([14.28, -13.44], %) // HorizontalLineRelative
  |> close(%);
show(svg);
  |> close(%)
show(svg)
@@ -466,5 +466,5 @@ const svg = startSketchAt([0, 0])
  |> bezierCurve({ control1: [-4, -3], control2: [-2.66, -3.67], to: [-3.32, -3.34] }, %) // CubicBezierAbsolute
  |> bezierCurve({ control1: [0, -2], control2: [-2.68, -2.67], to: [-1.36, -2.34] }, %) // CubicBezierAbsolute
  |> bezierCurve({ control1: [0, -0], control2: [0, -1.34], to: [0, -0.68] }, %) // CubicBezierAbsolute
  |> close(%);
show(svg);
  |> close(%)
show(svg)
@@ -32,7 +32,7 @@ async fn execute_and_snapshot(code: &str) -> Result<image::DynamicImage> {
    // Create a temporary file to write the output to.
    let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));

    let tokens = kcl_lib::tokeniser::lexer(code);
    let tokens = kcl_lib::token::lexer(code);
    let parser = kcl_lib::parser::Parser::new(tokens);
    let program = parser.ast()?;
    let mut mem: kcl_lib::executor::ProgramMemory = Default::default();
@@ -210,3 +210,45 @@ show(b2)"#;
        1.0,
    );
}

#[tokio::test(flavor = "multi_thread")]
async fn test_close_arc() {
    let code = r#"const center = [0,0]
const radius = 40
const height = 3

const body = startSketchAt([center[0]+radius, center[1]])
  |> arc({angle_end: 360, angle_start: 0, radius: radius}, %)
  |> close(%)
  |> extrude(height, %)

show(body)"#;

    let result = execute_and_snapshot(code).await.unwrap();
    twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 1.0);
}

#[tokio::test(flavor = "multi_thread")]
async fn test_negative_args() {
    let code = r#"const width = 5
const height = 10
const length = 12

fn box = (sk1, sk2, scale) => {
  const boxSketch = startSketchAt([sk1, sk2])
    |> line([0, scale], %)
    |> line([scale, 0], %)
    |> line([0, -scale], %)
    |> close(%)
    |> extrude(scale, %)
  return boxSketch
}

box(0, 0, 5)
box(10, 23, 8)
let thing = box(-12, -15, 10)
box(-20, -5, 10)"#;

    let result = execute_and_snapshot(code).await.unwrap();
    twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 1.0);
}
BIN src/wasm-lib/tests/executor/outputs/close_arc.png (new file, 95 KiB, binary file not shown)
BIN src/wasm-lib/tests/executor/outputs/negative_args.png (new file, 78 KiB, binary file not shown)
@@ -33,7 +33,7 @@ async fn setup(code: &str, name: &str) -> Result<(EngineConnection, Program, uui
        .commands_ws(None, None, None, None, Some(false))
        .await?;

    let tokens = kcl_lib::tokeniser::lexer(code);
    let tokens = kcl_lib::token::lexer(code);
    let parser = kcl_lib::parser::Parser::new(tokens);
    let program = parser.ast()?;
    let mut mem: kcl_lib::executor::ProgramMemory = Default::default();
13 yarn.lock
@@ -1530,10 +1530,10 @@
  resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
  integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==

"@kittycad/lib@^0.0.38":
  version "0.0.38"
  resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.38.tgz#50474266f679990bd414c30f884f2d42a0d5dba9"
  integrity sha512-Lv9P7jqVRoGgOnCsRCsG8OwZH5n3scxXYrElR+5/Rsd6/KIarLB4bSBngJrXebOnmTw5md0OPeY+b3ZDbZFDeg==
"@kittycad/lib@^0.0.39":
  version "0.0.39"
  resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.39.tgz#e548acf5ff7d45a1f1ec9ad2c61ddcfc30d159b7"
  integrity sha512-cB4wNjsKTMpJUn/kMK3qtkVAqB1csSglqThe+bj02nC1kWTB1XgYxksooc/Gzl1MoK1/n0OPQcbOb7Tojb836A==
  dependencies:
    node-fetch "3.3.2"
    openapi-types "^12.0.0"
@@ -1888,11 +1888,6 @@
  resolved "https://registry.yarnpkg.com/@types/debounce-promise/-/debounce-promise-3.1.6.tgz#873e838574011095ed0debf73eed3538e1261d75"
  integrity sha512-DowqK95aku+OxMCeG2EQSeXeGeE8OCwLpMsUfIbP7hMF8Otj8eQXnzpwdtIKV+UqQBtkMcF6vbi4Otbh8P/wmg==

"@types/debounce@^1.2.1":
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/@types/debounce/-/debounce-1.2.1.tgz#79b65710bc8b6d44094d286aecf38e44f9627852"
  integrity sha512-epMsEE85fi4lfmJUH/89/iV/LI+F5CvNIvmgs5g5jYFPfhO2S/ae8WSsLOKWdwtoaZw9Q2IhJ4tQ5tFCcS/4HA==

"@types/eslint@^8.4.5":
  version "8.44.1"
  resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.1.tgz#d1811559bb6bcd1a76009e3f7883034b78a0415e"