Compare commits

...

13 Commits

Author SHA1 Message Date
12b3717eb5 Cut release v0.9.2 (#714) 2023-09-26 20:39:05 -04:00
0bc685b0c4 Bump tungstenite from 0.20.0 to 0.20.1 in /src/wasm-lib/kcl/fuzz (#709)
Bumps [tungstenite](https://github.com/snapview/tungstenite-rs) from 0.20.0 to 0.20.1.
- [Changelog](https://github.com/snapview/tungstenite-rs/blob/master/CHANGELOG.md)
- [Commits](https://github.com/snapview/tungstenite-rs/compare/v0.20.0...v0.20.1)

---
updated-dependencies:
- dependency-name: tungstenite
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-25 20:49:16 -07:00
9ee032771a unused dep (#710) 2023-09-26 03:22:05 +00:00
c307ddd1b1 resize (#706)
* start of resize

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* refactor

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* check if 0

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* will work w new lib

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* new types

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* handle resize effect

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
Co-authored-by: Kurt Hutten Irev-Dev <k.hutten@protonmail.ch>
2023-09-25 19:49:53 -07:00
a30818ff2b fixes negative args in function (#707)
* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* updates

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* fixes

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
2023-09-25 15:25:58 -07:00
53e763d938 fix close arc (#704)
* fix close arc

Signed-off-by: Jess Frazelle <github@jessfraz.com>

* much bigger radius

Signed-off-by: Jess Frazelle <github@jessfraz.com>

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
2023-09-25 12:14:41 -07:00
8f74cd1d0c Bump tauri-plugin-fs-extra from 0190f68 to b04bde3 in /src-tauri (#702)
Bumps [tauri-plugin-fs-extra](https://github.com/tauri-apps/plugins-workspace) from `0190f68` to `b04bde3`.
- [Release notes](https://github.com/tauri-apps/plugins-workspace/releases)
- [Commits](0190f68f1d...b04bde3461)

---
updated-dependencies:
- dependency-name: tauri-plugin-fs-extra
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-09-25 09:33:53 -07:00
c271942897 remove errors (#703) 2023-09-25 07:28:03 +00:00
a03d09b41d Restructure tokenizer module (#700)
* Remove duplicated tests

These tests were already copied to tokeniser2.rs, so removing them doesn't affect code coverage.

* Move tokeniser to its own module

Now there's a module for tokens, and the tokenizer/lexer implementation is private within the token module.
2023-09-24 20:01:17 -05:00
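
A rough sketch of the layout that message describes, as a self-contained Rust snippet (module and type names other than token::lexer are assumed, not taken from the repo):

    pub mod token {
        // The tokenizer/lexer implementation is a private child module;
        // callers can only reach it through token::lexer below.
        mod tokeniser {
            pub(super) fn lexer(source: &str) -> Vec<super::Token> {
                let _ = source;
                Vec::new() // stub for the sketch
            }
        }

        #[derive(Debug, Clone, PartialEq)]
        pub struct Token; // fields elided

        // Public entry point, used elsewhere in this diff as kcl_lib::token::lexer(...).
        pub fn lexer(source: &str) -> Vec<Token> {
            tokeniser::lexer(source)
        }
    }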
2971b7752b Bump rust websocket libraries (#701)
Changelog: https://github.com/snapview/tokio-tungstenite/blob/master/CHANGELOG.md#0201
2023-09-24 21:34:31 +00:00
70e99eb00b Refactor is_code_token into a method (#699)
* Refactor is_code_token into a method

* Fix typos, use Parser as it was imported
2023-09-24 21:11:36 +00:00
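
The commit title only names the function; a hedged sketch of the refactor's shape (the token fields and variants are assumed), moving a free function onto the type so call sites read token.is_code_token():

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum TokenType {
        Whitespace,
        LineComment,
        BlockComment,
        Word, // other variants elided
    }

    struct Token {
        token_type: TokenType,
    }

    impl Token {
        // Formerly a free fn is_code_token(token: &Token) -> bool.
        fn is_code_token(&self) -> bool {
            !matches!(
                self.token_type,
                TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
            )
        }
    }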
5c66af59d2 New tokenizer based on winnow (#697)
* New tokenizer, using Winnow instead of regexes.

Between 1.3x and 4.4x speedup on lexer benchmarks :)

* Use dispatch instead of alt

Most of the time, if you know the first character of a token, you can narrow down its possible token types, instead of just trying each token type until one succeeds.

This further speeds up the lexer: this branch is now between 3x and 12x faster than main.
2023-09-22 21:57:39 -05:00
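
A minimal sketch of that dispatch idea against the winnow 0.5 API (the token kinds and parser names here are illustrative, not kcl-lib's):

    use winnow::combinator::{fail, peek};
    use winnow::dispatch;
    use winnow::token::{any, take_while};
    use winnow::{PResult, Parser};

    fn number(i: &mut &str) -> PResult<String> {
        take_while(1.., |c: char| c.is_ascii_digit())
            .map(|s: &str| s.to_owned())
            .parse_next(i)
    }

    fn word(i: &mut &str) -> PResult<String> {
        take_while(1.., |c: char| c.is_ascii_alphanumeric())
            .map(|s: &str| s.to_owned())
            .parse_next(i)
    }

    // Peek at the first character and jump straight to one sub-parser,
    // instead of trying every alternative in order with `alt`.
    fn token(i: &mut &str) -> PResult<String> {
        dispatch! {peek(any);
            '0'..='9' => number,
            'a'..='z' | 'A'..='Z' | '_' => word,
            _ => fail::<_, String, _>,
        }
        .parse_next(i)
    }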
6dda6daeef Use separate benchmarks for lexing and parsing (#698) 2023-09-23 02:01:18 +00:00
50 changed files with 2207 additions and 1176 deletions

View File

@@ -1,6 +1,6 @@
 {
   "name": "untitled-app",
-  "version": "0.9.1",
+  "version": "0.9.2",
   "private": true,
   "dependencies": {
     "@codemirror/autocomplete": "^6.9.0",
@@ -10,7 +10,7 @@
     "@fortawesome/react-fontawesome": "^0.2.0",
     "@headlessui/react": "^1.7.13",
     "@headlessui/tailwindcss": "^0.2.0",
-    "@kittycad/lib": "^0.0.38",
+    "@kittycad/lib": "^0.0.39",
     "@lezer/javascript": "^1.4.7",
     "@open-rpc/client-js": "^1.8.1",
     "@react-hook/resize-observer": "^1.2.6",
@@ -102,7 +102,6 @@
     "@babel/preset-env": "^7.22.9",
     "@tauri-apps/cli": "^1.3.1",
     "@types/crypto-js": "^4.1.1",
-    "@types/debounce": "^1.2.1",
     "@types/debounce-promise": "^3.1.6",
     "@types/isomorphic-fetch": "^0.0.36",
     "@types/react-modal": "^3.16.0",

src-tauri/Cargo.lock generated
View File

@@ -3775,7 +3775,7 @@ dependencies = [
 [[package]]
 name = "tauri-plugin-fs-extra"
 version = "0.0.0"
-source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#0190f68f1dff80576595a1b79e31338a3e9ebba1"
+source = "git+https://github.com/tauri-apps/plugins-workspace?branch=v1#b04bde3461066c709d6801cf9ca305cf889a8394"
 dependencies = [
  "log",
  "serde",

View File

@@ -8,7 +8,7 @@
   },
   "package": {
     "productName": "kittycad-modeling",
-    "version": "0.9.1"
+    "version": "0.9.2"
   },
   "tauri": {
     "allowlist": {

View File

@@ -31,6 +31,7 @@ import { TextEditor } from 'components/TextEditor'
 import { Themes, getSystemTheme } from 'lib/theme'
 import { useSetupEngineManager } from 'hooks/useSetupEngineManager'
 import { useEngineConnectionSubscriptions } from 'hooks/useEngineConnectionSubscriptions'
+import { engineCommandManager } from './lang/std/engineConnection'

 export function App() {
   const { code: loadedCode, project } = useLoaderData() as IndexLoaderData
@@ -39,7 +40,6 @@
   useHotKeyListener()
   const {
     setCode,
-    engineCommandManager,
     buttonDownInStream,
     openPanes,
     setOpenPanes,
@@ -52,7 +52,6 @@
     guiMode: s.guiMode,
     setGuiMode: s.setGuiMode,
     setCode: s.setCode,
-    engineCommandManager: s.engineCommandManager,
     buttonDownInStream: s.buttonDownInStream,
     openPanes: s.openPanes,
     setOpenPanes: s.setOpenPanes,
@@ -91,12 +90,12 @@
     if (guiMode.sketchMode === 'sketchEdit') {
       // TODO: share this with Toolbar's "Exit sketch" button
       // exiting sketch should be done consistently across all exits
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: { type: 'edit_mode_exit' },
       })
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: { type: 'default_camera_disable_sketch_mode' },
@@ -107,7 +106,7 @@
       // when exiting sketch mode in the future
       executeAst()
     } else {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: {
@@ -156,7 +155,7 @@
   useEngineConnectionSubscriptions()

   const debounceSocketSend = throttle<EngineCommand>((message) => {
-    engineCommandManager?.sendSceneCommand(message)
+    engineCommandManager.sendSceneCommand(message)
   }, 16)
   const handleMouseMove: MouseEventHandler<HTMLDivElement> = (e) => {
     e.nativeEvent.preventDefault()
@@ -216,7 +215,6 @@
     } else if (interactionGuards.zoom.dragCallback(eWithButton)) {
       interaction = 'zoom'
     } else {
-      console.log('none')
       return
     }

View File

@@ -18,6 +18,7 @@ import styles from './Toolbar.module.css'
 import { v4 as uuidv4 } from 'uuid'
 import { useAppMode } from 'hooks/useAppMode'
 import { ActionIcon } from 'components/ActionIcon'
+import { engineCommandManager } from './lang/std/engineConnection'

 export const sketchButtonClassnames = {
   background:
@@ -50,7 +51,6 @@ export const Toolbar = () => {
     ast,
     updateAst,
     programMemory,
-    engineCommandManager,
     executeAst,
   } = useStore((s) => ({
     guiMode: s.guiMode,
@@ -59,15 +59,10 @@
     ast: s.ast,
     updateAst: s.updateAst,
     programMemory: s.programMemory,
-    engineCommandManager: s.engineCommandManager,
     executeAst: s.executeAst,
   }))
   useAppMode()

-  useEffect(() => {
-    console.log('guiMode', guiMode)
-  }, [guiMode])
-
   function ToolbarButtons({ className }: React.HTMLAttributes<HTMLElement>) {
     return (
       <span className={styles.toolbarButtons + ' ' + className}>
@@ -173,12 +168,12 @@
       {guiMode.mode === 'sketch' && (
         <button
           onClick={() => {
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd_id: uuidv4(),
               cmd: { type: 'edit_mode_exit' },
             })
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd_id: uuidv4(),
               cmd: { type: 'default_camera_disable_sketch_mode' },
@@ -214,7 +209,7 @@
           <button
             key={sketchFnName}
             onClick={() => {
-              engineCommandManager?.sendSceneCommand({
+              engineCommandManager.sendSceneCommand({
                 type: 'modeling_cmd_req',
                 cmd_id: uuidv4(),
                 cmd: {

View File

@@ -10,6 +10,7 @@ import {
 } from '../lang/modifyAst'
 import { findAllPreviousVariables, PrevVariable } from '../lang/queryAst'
 import { useStore } from '../useStore'
+import { engineCommandManager } from '../lang/std/engineConnection'

 export const AvailableVars = ({
   onVarClick,
@@ -92,14 +93,11 @@
   newVariableInsertIndex: number
   setNewVariableName: (a: string) => void
 } {
-  const { ast, programMemory, selectionRange, engineCommandManager } = useStore(
-    (s) => ({
-      ast: s.ast,
-      programMemory: s.programMemory,
-      selectionRange: s.selectionRanges.codeBasedSelections[0].range,
-      engineCommandManager: s.engineCommandManager,
-    })
-  )
+  const { ast, programMemory, selectionRange } = useStore((s) => ({
+    ast: s.ast,
+    programMemory: s.programMemory,
+    selectionRange: s.selectionRanges.codeBasedSelections[0].range,
+  }))
   const inputRef = useRef<HTMLInputElement>(null)
   const [availableVarInfo, setAvailableVarInfo] = useState<
     ReturnType<typeof findAllPreviousVariables>
@@ -140,7 +138,6 @@ export function useCalc({
   }, [ast, programMemory, selectionRange])

   useEffect(() => {
-    if (!engineCommandManager) return
     try {
       const code = `const __result__ = ${value}\nshow(__result__)`
       const ast = parser_wasm(code)

View File

@@ -1,5 +1,4 @@
 import { CollapsiblePanel, CollapsiblePanelProps } from './CollapsiblePanel'
-import { useStore } from '../useStore'
 import { v4 as uuidv4 } from 'uuid'
 import { EngineCommand } from '../lang/std/engineConnection'
 import { useState } from 'react'
@@ -7,6 +6,7 @@ import { ActionButton } from '../components/ActionButton'
 import { faCheck } from '@fortawesome/free-solid-svg-icons'
 import { isReducedMotion } from 'lang/util'
 import { AstExplorer } from './AstExplorer'
+import { engineCommandManager } from '../lang/std/engineConnection'

 type SketchModeCmd = Extract<
   Extract<EngineCommand, { type: 'modeling_cmd_req' }>['cmd'],
@@ -14,9 +14,6 @@
 >

 export const DebugPanel = ({ className, ...props }: CollapsiblePanelProps) => {
-  const { engineCommandManager } = useStore((s) => ({
-    engineCommandManager: s.engineCommandManager,
-  }))
   const [sketchModeCmd, setSketchModeCmd] = useState<SketchModeCmd>({
     type: 'default_camera_enable_sketch_mode',
     origin: { x: 0, y: 0, z: 0 },
@@ -70,19 +67,18 @@
             className="w-16"
             type="checkbox"
             checked={sketchModeCmd.ortho}
-            onChange={(a) => {
-              console.log(a, (a as any).checked)
+            onChange={(a) =>
               setSketchModeCmd({
                 ...sketchModeCmd,
                 ortho: a.target.checked,
               })
-            }}
+            }
           />
         </div>
         <ActionButton
           Element="button"
           onClick={() => {
-            engineCommandManager?.sendSceneCommand({
+            engineCommandManager.sendSceneCommand({
               type: 'modeling_cmd_req',
               cmd: sketchModeCmd,
               cmd_id: uuidv4(),

View File

@@ -1,11 +1,11 @@
 import { v4 as uuidv4 } from 'uuid'
-import { useStore } from '../useStore'
 import { faFileExport, faXmark } from '@fortawesome/free-solid-svg-icons'
 import { ActionButton } from './ActionButton'
 import Modal from 'react-modal'
 import React from 'react'
 import { useFormik } from 'formik'
 import { Models } from '@kittycad/lib'
+import { engineCommandManager } from '../lang/std/engineConnection'

 type OutputFormat = Models['OutputFormat_type']
@@ -18,10 +18,6 @@ interface ExportButtonProps extends React.PropsWithChildren {
 }

 export const ExportButton = ({ children, className }: ExportButtonProps) => {
-  const { engineCommandManager } = useStore((s) => ({
-    engineCommandManager: s.engineCommandManager,
-  }))
-
   const [modalIsOpen, setIsOpen] = React.useState(false)
   const defaultType = 'gltf'
@@ -66,7 +62,7 @@
         },
       }
     }
-    engineCommandManager?.sendSceneCommand({
+    engineCommandManager.sendSceneCommand({
       type: 'modeling_cmd_req',
       cmd: {
         type: 'export',

View File

@@ -25,6 +25,7 @@ import { modify_ast_for_sketch } from '../wasm-lib/pkg/wasm_lib'
 import { KCLError } from 'lang/errors'
 import { KclError as RustKclError } from '../wasm-lib/kcl/bindings/KclError'
 import { rangeTypeFix } from 'lang/abstractSyntaxTree'
+import { engineCommandManager } from '../lang/std/engineConnection'

 export const Stream = ({ className = '' }) => {
   const [isLoading, setIsLoading] = useState(true)
@@ -32,7 +33,6 @@
   const videoRef = useRef<HTMLVideoElement>(null)
   const {
     mediaStream,
-    engineCommandManager,
     setButtonDownInStream,
     didDragInStream,
     setDidDragInStream,
@@ -45,7 +45,6 @@
     programMemory,
   } = useStore((s) => ({
     mediaStream: s.mediaStream,
-    engineCommandManager: s.engineCommandManager,
     setButtonDownInStream: s.setButtonDownInStream,
     fileId: s.fileId,
     didDragInStream: s.didDragInStream,
@@ -73,7 +72,7 @@
     if (!videoRef.current) return
     if (!mediaStream) return
     videoRef.current.srcObject = mediaStream
-  }, [mediaStream, engineCommandManager])
+  }, [mediaStream])

   const handleMouseDown: MouseEventHandler<HTMLVideoElement> = (e) => {
     if (!videoRef.current) return
@@ -107,7 +106,7 @@
     }

     if (guiMode.mode === 'sketch' && guiMode.sketchMode === ('move' as any)) {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd: {
           type: 'handle_mouse_drag_start',
@@ -121,7 +120,7 @@
         guiMode.sketchMode === ('sketch_line' as any)
       )
     ) {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd: {
           type: 'camera_drag_start',
@@ -139,7 +138,7 @@
   const handleScroll: WheelEventHandler<HTMLVideoElement> = (e) => {
     if (!cameraMouseDragGuards[cameraControls].zoom.scrollCallback(e)) return

-    engineCommandManager?.sendSceneCommand({
+    engineCommandManager.sendSceneCommand({
       type: 'modeling_cmd_req',
       cmd: {
         type: 'default_camera_zoom',
@@ -177,7 +176,7 @@
     }

     if (!didDragInStream) {
-      engineCommandManager?.sendSceneCommand({
+      engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd: {
           type: 'select_with_point',
@@ -214,7 +213,7 @@
         window: { x, y },
       }
     }
-    engineCommandManager?.sendSceneCommand(command).then(async (resp) => {
+    engineCommandManager.sendSceneCommand(command).then(async (resp) => {
       if (!(guiMode.mode === 'sketch')) return
       if (guiMode.sketchMode === 'selectFace') return
@@ -240,9 +239,6 @@
       ) {
         // Let's get the updated ast.
         if (sketchGroupId === '') return
-
-        console.log('guiMode.pathId', guiMode.pathId)
-
         // We have a problem if we do not have an id for the sketch group.
         if (
           guiMode.pathId === undefined ||
@@ -285,7 +281,7 @@
         guiMode.waitingFirstClick &&
         !isEditingExistingSketch
       ) {
-        const curve = await engineCommandManager?.sendSceneCommand({
+        const curve = await engineCommandManager.sendSceneCommand({
           type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: {
@@ -326,7 +322,7 @@
         resp?.data?.data?.entities_modified?.length &&
         (!guiMode.waitingFirstClick || isEditingExistingSketch)
       ) {
-        const curve = await engineCommandManager?.sendSceneCommand({
+        const curve = await engineCommandManager.sendSceneCommand({
           type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: {
@@ -371,12 +367,12 @@
         setGuiMode({
           mode: 'default',
         })
-        engineCommandManager?.sendSceneCommand({
+        engineCommandManager.sendSceneCommand({
           type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: { type: 'edit_mode_exit' },
         })
-        engineCommandManager?.sendSceneCommand({
+        engineCommandManager.sendSceneCommand({
           type: 'modeling_cmd_req',
           cmd_id: uuidv4(),
           cmd: { type: 'default_camera_disable_sketch_mode' },

View File

@@ -30,6 +30,7 @@ import { isOverlap, roundOff } from 'lib/utils'
 import { kclErrToDiagnostic } from 'lang/errors'
 import { CSSRuleObject } from 'tailwindcss/types/config'
 import interact from '@replit/codemirror-interact'
+import { engineCommandManager } from '../lang/std/engineConnection'

 export const editorShortcutMeta = {
   formatCode: {
@@ -52,7 +53,6 @@
     code,
     deferredSetCode,
     editorView,
-    engineCommandManager,
     formatCode,
     isLSPServerReady,
     selectionRanges,
@@ -64,7 +64,6 @@
     code: s.code,
     deferredSetCode: s.deferredSetCode,
     editorView: s.editorView,
-    engineCommandManager: s.engineCommandManager,
     formatCode: s.formatCode,
     isLSPServerReady: s.isLSPServerReady,
     selectionRanges: s.selectionRanges,
@@ -173,7 +172,7 @@
       const idBasedSelections = codeBasedSelections
         .map(({ type, range }) => {
           const hasOverlap = Object.entries(
-            engineCommandManager?.sourceRangeMap || {}
+            engineCommandManager.sourceRangeMap || {}
           ).filter(([_, sourceRange]) => {
             return isOverlap(sourceRange, range)
           })
@@ -186,7 +185,7 @@
         })
         .filter(Boolean) as any

-      engineCommandManager?.cusorsSelected({
+      engineCommandManager.cusorsSelected({
         otherSelections: [],
         idBasedSelections,
       })

View File

@@ -133,7 +133,7 @@ export const SetAbsDistance = ({ buttonType }: { buttonType: ButtonType }) => {
             callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
           })
         } catch (e) {
-          console.log('e', e)
+          console.log('error', e)
         }
       }}
       disabled={!enableAngLen}

View File

@@ -147,7 +147,7 @@ export const SetAngleLength = ({
             callBack: updateCursors(setCursor, selectionRanges, pathToNodeMap),
           })
         } catch (e) {
-          console.log('e', e)
+          console.log('erorr', e)
         }
       }}
       disabled={!enableAngLen}

View File

@@ -109,7 +109,6 @@ export default class Client extends jsrpc.JSONRPCServerAndClient {
         }
       }
       messageString += message
-      // console.log(messageString)

       return
     })

View File

@@ -96,8 +96,6 @@ export class LanguageServerPlugin implements PluginValue {
   async sendChange({ documentText }: { documentText: string }) {
     if (!this.client.ready) return

-    console.log(documentText.length)
-
     if (documentText.length > 5000) {
       // Clear out the text it thinks we have, large documents will throw a stack error.
       // This is obviously not a good fix but it works for now til we figure

View File

@@ -8,6 +8,7 @@ import { ArtifactMap, EngineCommandManager } from 'lang/std/engineConnection'
 import { Models } from '@kittycad/lib/dist/types/src'
 import { isReducedMotion } from 'lang/util'
 import { isOverlap } from 'lib/utils'
+import { engineCommandManager } from '../lang/std/engineConnection'

 interface DefaultPlanes {
   xy: string
@@ -17,19 +18,13 @@
 }

 export function useAppMode() {
-  const {
-    guiMode,
-    setGuiMode,
-    selectionRanges,
-    engineCommandManager,
-    selectionRangeTypeMap,
-  } = useStore((s) => ({
-    guiMode: s.guiMode,
-    setGuiMode: s.setGuiMode,
-    selectionRanges: s.selectionRanges,
-    engineCommandManager: s.engineCommandManager,
-    selectionRangeTypeMap: s.selectionRangeTypeMap,
-  }))
+  const { guiMode, setGuiMode, selectionRanges, selectionRangeTypeMap } =
+    useStore((s) => ({
+      guiMode: s.guiMode,
+      setGuiMode: s.setGuiMode,
+      selectionRanges: s.selectionRanges,
+      selectionRangeTypeMap: s.selectionRangeTypeMap,
+    }))
   const [defaultPlanes, setDefaultPlanes] = useState<DefaultPlanes | null>(null)
   useEffect(() => {
     if (
@@ -65,7 +60,7 @@
       setDefaultPlanesHidden(engineCommandManager, localDefaultPlanes, true)
       // TODO figure out the plane to use based on the sketch
       // maybe it's easier to make a new plane than rely on the defaults
-      await engineCommandManager?.sendSceneCommand({
+      await engineCommandManager.sendSceneCommand({
         type: 'modeling_cmd_req',
         cmd_id: uuidv4(),
         cmd: {
@@ -135,7 +130,7 @@
   ])

   useEffect(() => {
-    const unSub = engineCommandManager?.subscribeTo({
+    const unSub = engineCommandManager.subscribeTo({
       event: 'select_with_point',
       callback: async ({ data }) => {
         if (!data.entity_id) return
@@ -144,18 +139,16 @@
           // user clicked something else in the scene
           return
         }
-        const sketchModeResponse = await engineCommandManager?.sendSceneCommand(
-          {
-            type: 'modeling_cmd_req',
-            cmd_id: uuidv4(),
-            cmd: {
-              type: 'sketch_mode_enable',
-              plane_id: data.entity_id,
-              ortho: true,
-              animated: !isReducedMotion(),
-            },
-          }
-        )
+        const sketchModeResponse = await engineCommandManager.sendSceneCommand({
+          type: 'modeling_cmd_req',
+          cmd_id: uuidv4(),
+          cmd: {
+            type: 'sketch_mode_enable',
+            plane_id: data.entity_id,
+            ortho: true,
+            animated: !isReducedMotion(),
+          },
+        })
         setDefaultPlanesHidden(engineCommandManager, defaultPlanes, true)
         const sketchUuid = uuidv4()
         const proms: any[] = []
@@ -178,8 +171,7 @@
           },
         })
       )
-      const res = await Promise.all(proms)
-      console.log('res', res)
+      await Promise.all(proms)
       setGuiMode({
         mode: 'sketch',
         sketchMode: 'sketchEdit',
@@ -209,7 +201,7 @@ async function createPlane(
   }
 ) {
   const planeId = uuidv4()
-  await engineCommandManager?.sendSceneCommand({
+  await engineCommandManager.sendSceneCommand({
     type: 'modeling_cmd_req',
     cmd: {
       type: 'make_plane',
@@ -221,7 +213,7 @@
     },
     cmd_id: planeId,
   })
-  await engineCommandManager?.sendSceneCommand({
+  await engineCommandManager.sendSceneCommand({
     type: 'modeling_cmd_req',
     cmd: {
       type: 'plane_set_color',
@@ -234,12 +226,12 @@
 }

 function setDefaultPlanesHidden(
-  engineCommandManager: EngineCommandManager | undefined,
+  engineCommandManager: EngineCommandManager,
   defaultPlanes: DefaultPlanes,
   hidden: boolean
 ) {
   Object.values(defaultPlanes).forEach((planeId) => {
-    engineCommandManager?.sendSceneCommand({
+    engineCommandManager.sendSceneCommand({
       type: 'modeling_cmd_req',
       cmd_id: uuidv4(),
       cmd: {

View File

@@ -1,14 +1,9 @@
 import { useEffect } from 'react'
 import { useStore } from 'useStore'
+import { engineCommandManager } from '../lang/std/engineConnection'

 export function useEngineConnectionSubscriptions() {
-  const {
-    engineCommandManager,
-    setCursor2,
-    setHighlightRange,
-    highlightRange,
-  } = useStore((s) => ({
-    engineCommandManager: s.engineCommandManager,
+  const { setCursor2, setHighlightRange, highlightRange } = useStore((s) => ({
     setCursor2: s.setCursor2,
     setHighlightRange: s.setHighlightRange,
     highlightRange: s.highlightRange,

View File

@@ -1,53 +1,90 @@
-import { useLayoutEffect } from 'react'
+import { useLayoutEffect, useEffect, useRef } from 'react'
 import { _executor } from '../lang/executor'
 import { useStore } from '../useStore'
-import { EngineCommandManager } from '../lang/std/engineConnection'
+import { engineCommandManager } from '../lang/std/engineConnection'
+import { deferExecution } from 'lib/utils'

 export function useSetupEngineManager(
   streamRef: React.RefObject<HTMLDivElement>,
   token?: string
 ) {
   const {
-    setEngineCommandManager,
     setMediaStream,
     setIsStreamReady,
     setStreamDimensions,
     executeCode,
+    streamDimensions,
   } = useStore((s) => ({
-    setEngineCommandManager: s.setEngineCommandManager,
     setMediaStream: s.setMediaStream,
     setIsStreamReady: s.setIsStreamReady,
     setStreamDimensions: s.setStreamDimensions,
     executeCode: s.executeCode,
+    streamDimensions: s.streamDimensions,
   }))

   const streamWidth = streamRef?.current?.offsetWidth
   const streamHeight = streamRef?.current?.offsetHeight
+  const hasSetNonZeroDimensions = useRef<boolean>(false)
+
+  useLayoutEffect(() => {
+    // Load the engine command manager once with the initial width and height,
+    // then we do not want to reload it.
+    const { width: quadWidth, height: quadHeight } = getDimensions(
+      streamWidth,
+      streamHeight
+    )
+    if (!hasSetNonZeroDimensions.current && quadHeight && quadWidth) {
+      engineCommandManager.start({
+        setMediaStream,
+        setIsStreamReady,
+        width: quadWidth,
+        height: quadHeight,
+        token,
+      })
+      engineCommandManager.waitForReady.then(() => {
+        executeCode()
+      })
+      setStreamDimensions({
+        streamWidth: quadWidth,
+        streamHeight: quadHeight,
+      })
+      hasSetNonZeroDimensions.current = true
+    }
+  }, [streamRef?.current?.offsetWidth, streamRef?.current?.offsetHeight])
+
+  useEffect(() => {
+    const handleResize = deferExecution(() => {
+      const { width, height } = getDimensions(
+        streamRef?.current?.offsetWidth,
+        streamRef?.current?.offsetHeight
+      )
+      if (
+        streamDimensions.streamWidth !== width ||
+        streamDimensions.streamHeight !== height
+      ) {
+        engineCommandManager.handleResize({
+          streamWidth: width,
+          streamHeight: height,
+        })
+        setStreamDimensions({
+          streamWidth: width,
+          streamHeight: height,
+        })
+      }
+    }, 500)
+
+    window.addEventListener('resize', handleResize)
+    return () => {
+      window.removeEventListener('resize', handleResize)
+    }
+  }, [])
+}

+function getDimensions(streamWidth?: number, streamHeight?: number) {
   const width = streamWidth ? streamWidth : 0
   const quadWidth = Math.round(width / 4) * 4
   const height = streamHeight ? streamHeight : 0
   const quadHeight = Math.round(height / 4) * 4
+  return { width: quadWidth, height: quadHeight }
-
-  useLayoutEffect(() => {
-    setStreamDimensions({
-      streamWidth: quadWidth,
-      streamHeight: quadHeight,
-    })
-    if (!width || !height) return
-    const eng = new EngineCommandManager({
-      setMediaStream,
-      setIsStreamReady,
-      width: quadWidth,
-      height: quadHeight,
-      token,
-    })
-    setEngineCommandManager(eng)
-    eng.waitForReady.then(() => {
-      executeCode()
-    })
-    return () => {
-      eng?.tearDown()
-    }
-  }, [quadWidth, quadHeight])
 }

View File

@@ -48,7 +48,7 @@ export function useConvertToVariable() {
       updateAst(_modifiedAst, true)
     } catch (e) {
-      console.log('e', e)
+      console.log('error', e)
     }
   }

View File

@@ -1691,7 +1691,6 @@ describe('parsing errors', () => {
     let _theError
     try {
       const result = expect(parser_wasm(code))
-      console.log('result', result)
     } catch (e) {
       _theError = e
     }

View File

@@ -7,7 +7,7 @@ export const recast = (ast: Program): string => {
     return s
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('recast', e)
+    console.log('recast error', e)
     throw e
   }
 }

View File

@@ -595,7 +595,12 @@ export class EngineCommandManager {
       [localUnsubscribeId: string]: (a: any) => void
     }
   } = {} as any
-  constructor({
+
+  constructor() {
+    this.engineConnection = undefined
+  }
+
+  start({
     setMediaStream,
     setIsStreamReady,
     width,
@@ -608,6 +613,16 @@
     height: number
     token?: string
   }) {
+    if (width === 0 || height === 0) {
+      return
+    }
+
+    // If we already have an engine connection, just need to resize the stream.
+    if (this.engineConnection) {
+      this.handleResize({ streamWidth: width, streamHeight: height })
+      return
+    }
+
     this.waitForReady = new Promise((resolve) => {
       this.resolveReady = resolve
     })
@@ -689,7 +704,35 @@
     this.engineConnection?.connect()
   }

+  handleResize({
+    streamWidth,
+    streamHeight,
+  }: {
+    streamWidth: number
+    streamHeight: number
+  }) {
+    console.log('handleResize', streamWidth, streamHeight)
+    if (!this.engineConnection?.isReady()) {
+      return
+    }
+
+    const resizeCmd: EngineCommand = {
+      type: 'modeling_cmd_req',
+      cmd_id: uuidv4(),
+      cmd: {
+        type: 'reconfigure_stream',
+        width: streamWidth,
+        height: streamHeight,
+        fps: 60,
+      },
+    }
+    this.engineConnection?.send(resizeCmd)
+  }
+
   handleModelingCommand(message: WebSocketResponse, id: string) {
+    if (this.engineConnection === undefined) {
+      return
+    }
     if (message.type !== 'modeling') {
       return
     }
@@ -854,6 +897,9 @@
     })
   }
   sendSceneCommand(command: EngineCommand): Promise<any> {
+    if (this.engineConnection === undefined) {
+      return Promise.resolve()
+    }
     if (
       command.type === 'modeling_cmd_req' &&
       command.cmd.type !== lastMessage
@@ -905,6 +951,9 @@
     range: SourceRange
     command: EngineCommand | string
   }): Promise<any> {
+    if (this.engineConnection === undefined) {
+      return Promise.resolve()
+    }
     this.sourceRangeMap[id] = range

     if (!this.engineConnection?.isReady()) {
@@ -950,6 +999,9 @@
     rangeStr: string,
     commandStr: string
   ): Promise<any> {
+    if (this.engineConnection === undefined) {
+      return Promise.resolve()
+    }
     if (id === undefined) {
       throw new Error('id is undefined')
     }
@@ -1000,6 +1052,9 @@
     }
   }
   private async fixIdMappings(ast: Program, programMemory: ProgramMemory) {
+    if (this.engineConnection === undefined) {
+      return
+    }
     /* This is a temporary solution since the cmd_ids that are sent through when
     sending 'extend_path' ids are not used as the segment ids.
@@ -1079,3 +1134,5 @@
     })
   }
 }
+
+export const engineCommandManager = new EngineCommandManager()

View File

@@ -1279,7 +1279,7 @@ export function getTransformInfos(
     }) as TransformInfo[]
     return theTransforms
   } catch (error) {
-    console.log(error)
+    console.log('error', error)
     return []
   }
 }

View File

@@ -11,7 +11,7 @@ export async function asyncLexer(str: string): Promise<Token[]> {
     return tokens
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('lexer', e)
+    console.log('lexer error', e)
     throw e
   }
 }
@@ -22,7 +22,7 @@ export function lexer(str: string): Token[] {
     return tokens
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('lexer', e)
+    console.log('lexer error', e)
     throw e
   }
 }

View File

@@ -39,6 +39,6 @@ export async function exportSave(data: ArrayBuffer) {
     }
   } catch (e) {
     // TODO: do something real with the error.
-    console.log('export', e)
+    console.log('export error', e)
   }
 }

View File

@@ -36,7 +36,7 @@ export async function initializeProjectDirectory(directory: string) {
   try {
     docDirectory = await documentDir()
   } catch (e) {
-    console.log(e)
+    console.log('error', e)
     docDirectory = await homeDir() // seems to work better on Linux
   }

View File

@@ -75,11 +75,12 @@ export async function executor(
   ast: Program,
   pm: ProgramMemory = { root: {}, return: null }
 ): Promise<ProgramMemory> {
-  const engineCommandManager = new EngineCommandManager({
+  const engineCommandManager = new EngineCommandManager()
+  engineCommandManager.start({
     setIsStreamReady: () => {},
     setMediaStream: () => {},
-    width: 100,
-    height: 100,
+    width: 0,
+    height: 0,
   })
   await engineCommandManager.waitForReady
   engineCommandManager.startNewSession()

View File

@@ -19,6 +19,7 @@ import { KCLError } from './lang/errors'
 import { deferExecution } from 'lib/utils'
 import { _executor } from './lang/executor'
 import { bracket } from 'lib/exampleKcl'
+import { engineCommandManager } from './lang/std/engineConnection'

 export type Selection = {
   type: 'default' | 'line-end' | 'line-mid'
@@ -162,8 +163,6 @@
   setProgramMemory: (programMemory: ProgramMemory) => void
   isShiftDown: boolean
   setIsShiftDown: (isShiftDown: boolean) => void
-  engineCommandManager?: EngineCommandManager
-  setEngineCommandManager: (engineCommandManager: EngineCommandManager) => void
   mediaStream?: MediaStream
   setMediaStream: (mediaStream: MediaStream) => void
   isStreamReady: boolean
@@ -226,7 +225,7 @@
       const result = await executeCode({
         code: code || get().code,
         lastAst: get().ast,
-        engineCommandManager: get().engineCommandManager,
+        engineCommandManager: engineCommandManager,
       })
       if (!result.isChange) {
         return
@@ -332,8 +331,6 @@
     executeAst: async (ast) => {
       const _ast = ast || get().ast
       if (!get().isStreamReady) return
-      const engineCommandManager = get().engineCommandManager!
-      if (!engineCommandManager) return

       set({ isExecuting: true })
       const { logs, errors, programMemory } = await executeAst({
@@ -350,8 +347,6 @@
     executeAstMock: async (ast) => {
       const _ast = ast || get().ast
       if (!get().isStreamReady) return
-      const engineCommandManager = get().engineCommandManager!
-      if (!engineCommandManager) return

       const { logs, errors, programMemory } = await executeAst({
         ast: _ast,
@@ -435,8 +430,6 @@
     setProgramMemory: (programMemory) => set({ programMemory }),
     isShiftDown: false,
     setIsShiftDown: (isShiftDown) => set({ isShiftDown }),
-    setEngineCommandManager: (engineCommandManager) =>
-      set({ engineCommandManager }),
     setMediaStream: (mediaStream) => set({ mediaStream }),
     isStreamReady: false,
     setIsStreamReady: (isStreamReady) => set({ isStreamReady }),
@@ -454,7 +447,9 @@
     fileId: '',
     setFileId: (fileId) => set({ fileId }),
     streamDimensions: { streamWidth: 1280, streamHeight: 720 },
-    setStreamDimensions: (streamDimensions) => set({ streamDimensions }),
+    setStreamDimensions: (streamDimensions) => {
+      set({ streamDimensions })
+    },
     isExecuting: false,
     setIsExecuting: (isExecuting) => set({ isExecuting }),
@@ -519,7 +514,7 @@ async function executeCode({
 }: {
   code: string
   lastAst: Program
-  engineCommandManager?: EngineCommandManager
+  engineCommandManager: EngineCommandManager
 }): Promise<
   | {
       logs: string[]
@@ -539,7 +534,7 @@
     if (e instanceof KCLError) {
       errors = [e]
       logs = []
-      if (e.msg === 'file is empty') engineCommandManager?.endSession()
+      if (e.msg === 'file is empty') engineCommandManager.endSession()
     }
     return {
       isChange: true,
@@ -562,7 +557,7 @@
   }
   // Check if the ast we have is equal to the ast in the storage.
   // If it is, we don't need to update the ast.
-  if (!engineCommandManager || JSON.stringify(ast) === JSON.stringify(lastAst))
+  if (JSON.stringify(ast) === JSON.stringify(lastAst))
     return { isChange: false }

   const { logs, errors, programMemory } = await executeAst({

View File

@@ -1394,7 +1394,6 @@ dependencies = [
 "lazy_static",
 "parse-display",
 "pretty_assertions",
-"regex",
 "reqwest",
 "schemars",
 "serde",
@@ -1408,6 +1407,7 @@
 "wasm-bindgen",
 "wasm-bindgen-futures",
 "web-sys",
+"winnow",
 ]

 [[package]]
@@ -3081,9 +3081,9 @@ dependencies = [
 [[package]]
 name = "tokio-tungstenite"
-version = "0.20.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2"
+checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c"
 dependencies = [
 "futures-util",
 "log",
@@ -3303,9 +3303,9 @@ dependencies = [
 [[package]]
 name = "tungstenite"
-version = "0.20.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
+checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
 dependencies = [
 "byteorder",
 "bytes",
@@ -3792,6 +3792,15 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

+[[package]]
+name = "winnow"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "winreg"
 version = "0.50.0"

View File

@@ -18,13 +18,13 @@ derive-docs = { path = "../derive-docs" }
 kittycad = { version = "0.2.25", default-features = false, features = ["js"] }
 lazy_static = "1.4.0"
 parse-display = "0.8.2"
-regex = "1.7.1"
 schemars = { version = "0.8", features = ["impl_json_schema", "url", "uuid1"] }
 serde = { version = "1.0.188", features = ["derive"] }
 serde_json = "1.0.107"
 thiserror = "1.0.48"
 ts-rs = { version = "7", package = "ts-rs-json-value", features = ["serde-json-impl", "schemars-impl", "uuid-impl"] }
 uuid = { version = "1.4.1", features = ["v4", "js", "serde"] }
+winnow = "0.5.15"

 [target.'cfg(target_arch = "wasm32")'.dependencies]
 js-sys = { version = "0.3.64" }

View File

@@ -1,24 +1,32 @@
-use criterion::{criterion_group, criterion_main, Criterion};
+use criterion::{black_box, criterion_group, criterion_main, Criterion};

-pub fn criterion_benchmark(c: &mut Criterion) {
-    c.bench_function("parse + lex cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
-    c.bench_function("parse + lex big kitt", |b| {
-        b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/kittycad_svg.kcl")))
-    });
-    c.bench_function("parse + lex pipes_on_pipes", |b| {
-        b.iter(|| lex_and_parse(include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl")))
-    });
+pub fn bench_lex(c: &mut Criterion) {
+    c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM)));
+    c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM)));
+    c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
+}
+
+pub fn bench_lex_parse(c: &mut Criterion) {
+    c.bench_function("parse_lex_cube", |b| b.iter(|| lex_and_parse(CUBE_PROGRAM)));
+    c.bench_function("parse_lex_big_kitt", |b| b.iter(|| lex_and_parse(KITT_PROGRAM)));
+    c.bench_function("parse_lex_pipes_on_pipes", |b| b.iter(|| lex_and_parse(PIPES_PROGRAM)));
+}
+
+fn lex(program: &str) {
+    black_box(kcl_lib::token::lexer(program));
 }

 fn lex_and_parse(program: &str) {
-    let tokens = kcl_lib::tokeniser::lexer(program);
+    let tokens = kcl_lib::token::lexer(program);
     let parser = kcl_lib::parser::Parser::new(tokens);
-    parser.ast().unwrap();
+    black_box(parser.ast().unwrap());
 }

-criterion_group!(benches, criterion_benchmark);
+criterion_group!(benches, bench_lex, bench_lex_parse);
 criterion_main!(benches);

+const KITT_PROGRAM: &str = include_str!("../../tests/executor/inputs/kittycad_svg.kcl");
+const PIPES_PROGRAM: &str = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
+
 const CUBE_PROGRAM: &str = r#"fn cube = (pos, scale) => {
     const sg = startSketchAt(pos)
         |> line([0, scale], %)

View File

@@ -709,7 +709,6 @@ dependencies = [
 "kittycad",
 "lazy_static",
 "parse-display",
-"regex",
 "reqwest",
 "schemars",
 "serde",
@@ -723,6 +722,7 @@
 "wasm-bindgen",
 "wasm-bindgen-futures",
 "web-sys",
+"winnow",
 ]

 [[package]]
@@ -1878,9 +1878,9 @@ dependencies = [
 [[package]]
 name = "tungstenite"
-version = "0.20.0"
+version = "0.20.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
+checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9"
 dependencies = [
 "byteorder",
 "bytes",
@@ -2158,6 +2158,15 @@ version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

+[[package]]
+name = "winnow"
+version = "0.5.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "winreg"
 version = "0.50.0"

View File

@@ -166,7 +166,7 @@ pub async fn modify_ast_for_sketch(
     let recasted = program.recast(&FormatOptions::default(), 0);

     // Re-parse the ast so we get the correct source ranges.
-    let tokens = crate::tokeniser::lexer(&recasted);
+    let tokens = crate::token::lexer(&recasted);
     let parser = crate::parser::Parser::new(tokens);
     *program = parser.ast()?;

View File

@@ -2691,7 +2691,7 @@ fn ghi = (x) => {
 }
 show(part001)"#;
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
         let symbols = program.get_lsp_symbols(code);
@@ -2719,7 +2719,7 @@ show(part001)
         let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
  |> line([0.4900857016, -0.0240763666], %)
  |> line([0.6804562304, 0.9087880491], %)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2738,7 +2738,7 @@ show(part001)
         let some_program_string = r#"const part001 = startSketchAt([0.0, 5.0])
  |> line([0.4900857016, -0.0240763666], %) // hello world
  |> line([0.6804562304, 0.9087880491], %)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2757,7 +2757,7 @@ show(part001)
  |> line([0.4900857016, -0.0240763666], %)
  // hello world
  |> line([0.6804562304, 0.9087880491], %)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2783,7 +2783,7 @@ show(part001)
   // this is also a comment
   return things
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2820,7 +2820,7 @@ const mySk1 = startSketchAt([0, 0])
   |> ry(45, %)
   |> rx(45, %)
 // one more for good measure"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2859,7 +2859,7 @@ a comment between pipe expression statements */
   |> line([-0.42, -1.72], %)
 show(part001)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2885,7 +2885,7 @@ const yo = [
   " hey oooooo really long long long"
 ]
 "#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2903,7 +2903,7 @@ const key = 'c'
 const things = "things"
 // this is also a comment"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2921,7 +2921,7 @@ const things = "things"
 // a comment
   "
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2946,7 +2946,7 @@ const part001 = startSketchAt([0, 0])
      -angleToMatchLengthY('seg01', myVar, %),
      myVar
    ], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -2972,7 +2972,7 @@ const part001 = startSketchAt([0, 0])
      myVar
    ], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
 "#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -3003,7 +3003,7 @@ fn ghi = (part001) => {
 }
 show(part001)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let mut program = parser.ast().unwrap();
         program.rename_symbol("mySuperCoolPart", 6);
@@ -3034,7 +3034,7 @@ show(mySuperCoolPart)
         let some_program_string = r#"fn ghi = (x, y, z) => {
   return x
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let mut program = parser.ast().unwrap();
         program.rename_symbol("newName", 10);
@@ -3063,7 +3063,7 @@ const firstExtrude = startSketchAt([0,0])
   |> extrude(h, %)
 show(firstExtrude)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -3089,7 +3089,7 @@ show(firstExtrude)
     #[tokio::test(flavor = "multi_thread")]
     async fn test_recast_math_start_negative() {
         let some_program_string = r#"const myVar = -5 + 6"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();
@@ -3105,7 +3105,7 @@ const FOS = 2
 const sigmaAllow = 8
 const width = 20
 const thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
+        let tokens = crate::token::lexer(some_program_string);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast().unwrap();

View File

@@ -620,6 +620,22 @@ pub async fn execute(
                     let result = call_expr.execute(memory, &mut pipe_info, engine).await?;
                     args.push(result);
                 }
+                Value::BinaryExpression(binary_expression) => {
+                    let result = binary_expression.get_result(memory, &mut pipe_info, engine).await?;
+                    args.push(result);
+                }
+                Value::UnaryExpression(unary_expression) => {
+                    let result = unary_expression.get_result(memory, &mut pipe_info, engine).await?;
+                    args.push(result);
+                }
+                Value::ObjectExpression(object_expression) => {
+                    let result = object_expression.execute(memory, &mut pipe_info, engine).await?;
+                    args.push(result);
+                }
+                Value::ArrayExpression(array_expression) => {
+                    let result = array_expression.execute(memory, &mut pipe_info, engine).await?;
+                    args.push(result);
+                }
                 // We do nothing for the rest.
                 _ => (),
             }
@@ -679,7 +695,7 @@ pub async fn execute(
                 message: format!(
                     "Expected {} arguments, got {}",
                     function_expression.params.len(),
-                    args.len()
+                    args.len(),
                 ),
                 source_ranges: vec![(&function_expression).into()],
             }));
@@ -804,7 +820,7 @@ mod tests {
     use super::*;

     pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = crate::parser::Parser::new(tokens);
         let program = parser.ast()?;
         let mut mem: ProgramMemory = Default::default();
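The four new match arms let a call's arguments be expressions that are evaluated before the call itself runs, which is what makes negative (unary) arguments work. A hypothetical test in the style of this file, reusing the parse_execute helper above (`f` and its arguments are invented):

#[tokio::test(flavor = "multi_thread")]
async fn test_expression_arguments() {
    // -3 exercises UnaryExpression, 1 + 2 BinaryExpression,
    // [0, 1] ArrayExpression, and { a: 1 } ObjectExpression.
    let code = r#"fn f = (a, b, c, d) => {
  return a
}
const out = f(-3, 1 + 2, [0, 1], { a: 1 })"#;
    parse_execute(code).await.unwrap();
}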

View File

@@ -9,4 +9,4 @@ pub mod math_parser;
 pub mod parser;
 pub mod server;
 pub mod std;
-pub mod tokeniser;
+pub mod token;
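Callers now go through crate::token, and the winnow implementation stays private inside the module (the new token/mod.rs at the end of this diff declares `mod tokeniser;`). The public lexer entry point presumably has roughly this shape; the actual line sits past the truncated listing:

// src/wasm-lib/kcl/src/token/mod.rs (assumed shape, not shown in this diff)
pub fn lexer(s: &str) -> Vec<Token> {
    tokeniser::lexer(s)
}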

View File

@@ -10,8 +10,8 @@ use crate::{
     },
     errors::{KclError, KclErrorDetails},
     math_parser::MathParser,
-    parser::{is_not_code_token, Parser},
-    tokeniser::{Token, TokenType},
+    parser::Parser,
+    token::{Token, TokenType},
 };

 #[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
@@ -334,7 +334,7 @@ impl ReversePolishNotation {
             return rpn.parse();
         }
-        if is_not_code_token(current_token) {
+        if !current_token.is_code_token() {
             let rpn = ReversePolishNotation::new(&self.parser.tokens[1..], &self.previous_postfix, &self.operators);
             return rpn.parse();
         }
@@ -704,7 +704,7 @@ mod test {
     #[test]
     fn test_parse_expression() {
-        let tokens = crate::tokeniser::lexer("1 + 2");
+        let tokens = crate::token::lexer("1 + 2");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -731,7 +731,7 @@ mod test {
     #[test]
     fn test_parse_expression_add_no_spaces() {
-        let tokens = crate::tokeniser::lexer("1+2");
+        let tokens = crate::token::lexer("1+2");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -758,7 +758,7 @@ mod test {
     #[test]
     fn test_parse_expression_sub_no_spaces() {
-        let tokens = crate::tokeniser::lexer("1 -2");
+        let tokens = crate::token::lexer("1 -2");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -785,7 +785,7 @@ mod test {
     #[test]
     fn test_parse_expression_plus_followed_by_star() {
-        let tokens = crate::tokeniser::lexer("1 + 2 * 3");
+        let tokens = crate::token::lexer("1 + 2 * 3");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -823,7 +823,7 @@ mod test {
     #[test]
     fn test_parse_expression_with_parentheses() {
-        let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 )");
+        let tokens = crate::token::lexer("1 * ( 2 + 3 )");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -861,7 +861,7 @@ mod test {
     #[test]
     fn test_parse_expression_parens_in_middle() {
-        let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 ) / 4");
+        let tokens = crate::token::lexer("1 * ( 2 + 3 ) / 4");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -910,7 +910,7 @@ mod test {
     #[test]
     fn test_parse_expression_parans_and_predence() {
-        let tokens = crate::tokeniser::lexer("1 + ( 2 + 3 ) / 4");
+        let tokens = crate::token::lexer("1 + ( 2 + 3 ) / 4");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -958,7 +958,7 @@ mod test {
     }
     #[test]
     fn test_parse_expression_nested() {
-        let tokens = crate::tokeniser::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
+        let tokens = crate::token::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -1017,7 +1017,7 @@ mod test {
     }
     #[test]
     fn test_parse_expression_redundant_braces() {
-        let tokens = crate::tokeniser::lexer("1 * ((( 2 + 3 )))");
+        let tokens = crate::token::lexer("1 * ((( 2 + 3 )))");
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -1055,7 +1055,7 @@ mod test {
     #[test]
     fn test_reverse_polish_notation_simple() {
-        let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2"), &[], &[]);
+        let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2"), &[], &[]);
         let result = parser.parse().unwrap();
         assert_eq!(
             result,
@@ -1084,7 +1084,7 @@ mod test {
     #[test]
     fn test_reverse_polish_notation_complex() {
-        let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 + 2 * 3"), &[], &[]);
+        let parser = ReversePolishNotation::new(&crate::token::lexer("1 + 2 * 3"), &[], &[]);
         let result = parser.parse().unwrap();
         assert_eq!(
             result,
@@ -1125,7 +1125,7 @@ mod test {
     #[test]
     fn test_reverse_polish_notation_complex_with_parentheses() {
-        let parser = ReversePolishNotation::new(&crate::tokeniser::lexer("1 * ( 2 + 3 )"), &[], &[]);
+        let parser = ReversePolishNotation::new(&crate::token::lexer("1 * ( 2 + 3 )"), &[], &[]);
         let result = parser.parse().unwrap();
         assert_eq!(
             result,
@@ -1179,7 +1179,7 @@ mod test {
     #[test]
     fn test_parse_expression_redundant_braces_around_literal() {
         let code = "2 + (((3)))";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse().unwrap();
         assert_eq!(
@@ -1274,7 +1274,7 @@ mod test {
     #[test]
     fn test_parse_expression_braces_around_lots_of_math() {
         let code = "(distance * p * FOS * 6 / (sigmaAllow * width))";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse();
         assert!(result.is_ok());
@@ -1283,7 +1283,7 @@ mod test {
     #[test]
     fn test_parse_expression_braces_around_internals_lots_of_math() {
         let code = "distance * p * FOS * 6 / (sigmaAllow * width)";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let mut parser = MathParser::new(&tokens);
         let result = parser.parse();
         assert!(result.is_ok());

View File

@@ -10,7 +10,7 @@ use crate::{
     },
     errors::{KclError, KclErrorDetails},
     math_parser::MathParser,
-    tokeniser::{Token, TokenType},
+    token::{Token, TokenType},
 };

 pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
@@ -249,7 +249,7 @@ impl Parser {
         }
         let current_token = self.get_token(index)?;
-        if is_not_code_token(current_token) {
+        if !current_token.is_code_token() {
             return self.find_end_of_non_code_node(index + 1);
         }
@@ -262,7 +262,7 @@ impl Parser {
         }
         let current_token = self.get_token(index)?;
-        if is_not_code_token(current_token) {
+        if !current_token.is_code_token() {
             return self.find_start_of_non_code_node(index - 1);
         }
@@ -365,7 +365,7 @@ impl Parser {
             });
         };
-        if is_not_code_token(token) {
+        if !token.is_code_token() {
             let non_code_node = self.make_non_code_node(new_index)?;
             let new_new_index = non_code_node.1 + 1;
             let bonus_non_code_node = non_code_node.0;
@@ -1623,7 +1623,7 @@ impl Parser {
             });
         }
-        if is_not_code_token(token) {
+        if !token.is_code_token() {
             let next_token = self.next_meaningful_token(token_index, Some(0))?;
             if let Some(node) = &next_token.non_code_node {
                 if previous_body.is_empty() {
@@ -1788,12 +1788,6 @@ impl Parser {
     }
 }

-pub fn is_not_code_token(token: &Token) -> bool {
-    token.token_type == TokenType::Whitespace
-        || token.token_type == TokenType::LineComment
-        || token.token_type == TokenType::BlockComment
-}
-
 #[cfg(test)]
 mod tests {
     use pretty_assertions::assert_eq;
@@ -1803,7 +1797,7 @@ mod tests {
     #[test]
     fn test_make_identifier() {
-        let tokens = crate::tokeniser::lexer("a");
+        let tokens = crate::token::lexer("a");
         let parser = Parser::new(tokens);
         let identifier = parser.make_identifier(0).unwrap();
         assert_eq!(
@@ -1818,7 +1812,7 @@ mod tests {
     #[test]
     fn test_make_identifier_with_const_myvar_equals_5_and_index_2() {
-        let tokens = crate::tokeniser::lexer("const myVar = 5");
+        let tokens = crate::token::lexer("const myVar = 5");
         let parser = Parser::new(tokens);
         let identifier = parser.make_identifier(2).unwrap();
         assert_eq!(
@@ -1833,7 +1827,7 @@ mod tests {
     #[test]
     fn test_make_identifier_multiline() {
-        let tokens = crate::tokeniser::lexer("const myVar = 5\nconst newVar = myVar + 1");
+        let tokens = crate::token::lexer("const myVar = 5\nconst newVar = myVar + 1");
         let parser = Parser::new(tokens);
         let identifier = parser.make_identifier(2).unwrap();
         assert_eq!(
@@ -1857,7 +1851,7 @@ mod tests {
     #[test]
     fn test_make_identifier_call_expression() {
-        let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
+        let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
         let parser = Parser::new(tokens);
         let identifier = parser.make_identifier(0).unwrap();
         assert_eq!(
@@ -1880,7 +1874,7 @@ mod tests {
     }
     #[test]
     fn test_make_non_code_node() {
-        let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
+        let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
         let parser = Parser::new(tokens);
         let index = 4;
         let expected_output = (None, 4);
@@ -1889,7 +1883,7 @@ mod tests {
         let index = 7;
         let expected_output = (None, 7);
         assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"
 const yo = { a: { b: { c: '123' } } }
 // this is a comment
@@ -1920,7 +1914,7 @@ const key = 'c'"#,
             31,
         );
         assert_eq!(parser.make_non_code_node(index).unwrap(), expected_output);
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const mySketch = startSketchAt([0,0])
   |> lineTo({ to: [0, 1], tag: 'myPath' }, %)
   |> lineTo([1, 1], %) /* this is
@@ -1946,7 +1940,7 @@ const key = 'c'"#,
     #[test]
     fn test_collect_object_keys() {
-        let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
+        let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
         let parser = Parser::new(tokens);
         let keys_info = parser.collect_object_keys(6, None, false).unwrap();
         assert_eq!(keys_info.len(), 2);
@@ -1966,7 +1960,7 @@ const key = 'c'"#,
     #[test]
     fn test_make_literal_call_expression() {
-        let tokens = crate::tokeniser::lexer("log(5, \"hello\", aIdentifier)");
+        let tokens = crate::token::lexer("log(5, \"hello\", aIdentifier)");
         let parser = Parser::new(tokens);
         let literal = parser.make_literal(2).unwrap();
         assert_eq!(
@@ -1990,74 +1984,88 @@ const key = 'c'"#,
         );
     }

+    #[test]
+    fn test_is_code_token() {
+        let tokens = [
+            Token {
+                token_type: TokenType::Word,
+                start: 0,
+                end: 3,
+                value: "log".to_string(),
+            },
+            Token {
+                token_type: TokenType::Brace,
+                start: 3,
+                end: 4,
+                value: "(".to_string(),
+            },
+            Token {
+                token_type: TokenType::Number,
+                start: 4,
+                end: 5,
+                value: "5".to_string(),
+            },
+            Token {
+                token_type: TokenType::Comma,
+                start: 5,
+                end: 6,
+                value: ",".to_string(),
+            },
+            Token {
+                token_type: TokenType::String,
+                start: 7,
+                end: 14,
+                value: "\"hello\"".to_string(),
+            },
+            Token {
+                token_type: TokenType::Word,
+                start: 16,
+                end: 27,
+                value: "aIdentifier".to_string(),
+            },
+            Token {
+                token_type: TokenType::Brace,
+                start: 27,
+                end: 28,
+                value: ")".to_string(),
+            },
+        ];
+        for (i, token) in tokens.iter().enumerate() {
+            assert!(token.is_code_token(), "failed test {i}: {token:?}")
+        }
+    }
+
     #[test]
     fn test_is_not_code_token() {
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::Word,
-            start: 0,
-            end: 3,
-            value: "log".to_string(),
-        }));
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::Brace,
-            start: 3,
-            end: 4,
-            value: "(".to_string(),
-        }));
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::Number,
-            start: 4,
-            end: 5,
-            value: "5".to_string(),
-        }));
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::Comma,
-            start: 5,
-            end: 6,
-            value: ",".to_string(),
-        }));
-        assert!(is_not_code_token(&Token {
-            token_type: TokenType::Whitespace,
-            start: 6,
-            end: 7,
-            value: " ".to_string(),
-        }));
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::String,
-            start: 7,
-            end: 14,
-            value: "\"hello\"".to_string(),
-        }));
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::Word,
-            start: 16,
-            end: 27,
-            value: "aIdentifier".to_string(),
-        }));
-        assert!(!is_not_code_token(&Token {
-            token_type: TokenType::Brace,
-            start: 27,
-            end: 28,
-            value: ")".to_string(),
-        }));
-        assert!(is_not_code_token(&Token {
-            token_type: TokenType::BlockComment,
-            start: 28,
-            end: 30,
-            value: "/* abte */".to_string(),
-        }));
-        assert!(is_not_code_token(&Token {
-            token_type: TokenType::LineComment,
-            start: 30,
-            end: 33,
-            value: "// yoyo a line".to_string(),
-        }));
+        let tokens = [
+            Token {
+                token_type: TokenType::Whitespace,
+                start: 6,
+                end: 7,
+                value: " ".to_string(),
+            },
+            Token {
+                token_type: TokenType::BlockComment,
+                start: 28,
+                end: 30,
+                value: "/* abte */".to_string(),
+            },
+            Token {
+                token_type: TokenType::LineComment,
+                start: 30,
+                end: 33,
+                value: "// yoyo a line".to_string(),
+            },
+        ];
+        for (i, token) in tokens.iter().enumerate() {
+            assert!(!token.is_code_token(), "failed test {i}: {token:?}")
+        }
     }

     #[test]
     fn test_next_meaningful_token() {
         let _offset = 1;
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const mySketch = startSketchAt([0,0])
   |> lineTo({ to: [0, 1], tag: 'myPath' }, %)
   |> lineTo([1, 1], %) /* this is
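The deleted is_not_code_token free function lives on as a method on Token in the new token module. Its body is not visible in the truncated listing at the end of this diff, but given the old helper it is presumably the negation, roughly:

impl Token {
    /// Assumed shape, inverting the removed parser::is_not_code_token:
    /// everything except whitespace and comments counts as code.
    pub fn is_code_token(&self) -> bool {
        !matches!(
            self.token_type,
            TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
        )
    }
}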
@@ -2443,7 +2451,7 @@ const key = 'c'"#,
     #[test]
     fn test_find_closing_brace() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const mySketch = startSketchAt([0,0])
   |> lineTo({ to: [0, 1], tag: 'myPath' }, %)
   |> lineTo([1, 1], %) /* this is
@@ -2460,16 +2468,16 @@ const key = 'c'"#,
         assert_eq!(parser.find_closing_brace(90, 0, "").unwrap(), 92);

         let basic = "( hey )";
-        let parser = Parser::new(crate::tokeniser::lexer(basic));
+        let parser = Parser::new(crate::token::lexer(basic));
         assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 4);

         let handles_non_zero_index = "(indexForBracketToRightOfThisIsTwo(shouldBeFour)AndNotThisSix)";
-        let parser = Parser::new(crate::tokeniser::lexer(handles_non_zero_index));
+        let parser = Parser::new(crate::token::lexer(handles_non_zero_index));
         assert_eq!(parser.find_closing_brace(2, 0, "").unwrap(), 4);
         assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 6);

         let handles_nested = "{a{b{c(}d]}eathou athoeu tah u} thatOneToTheLeftIsLast }";
-        let parser = Parser::new(crate::tokeniser::lexer(handles_nested));
+        let parser = Parser::new(crate::token::lexer(handles_nested));
         assert_eq!(parser.find_closing_brace(0, 0, "").unwrap(), 18);

         // TODO expect error when not started on a brace
@@ -2477,7 +2485,7 @@ const key = 'c'"#,
     #[test]
     fn test_is_call_expression() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const mySketch = startSketchAt([0,0])
   |> lineTo({ to: [0, 1], tag: 'myPath' }, %)
   |> lineTo([1, 1], %) /* this is
@@ -2498,7 +2506,7 @@ const key = 'c'"#,
     #[test]
     fn test_find_next_declaration_keyword() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const mySketch = startSketchAt([0,0])
   |> lineTo({ to: [0, 1], tag: 'myPath' }, %)
   |> lineTo([1, 1], %) /* this is
@@ -2513,7 +2521,7 @@ const key = 'c'"#,
             TokenReturn { token: None, index: 92 }
         );

-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const myVar = 5
 const newVar = myVar + 1
 "#,
@@ -2543,7 +2551,7 @@ const newVar = myVar + 1
   lineTo(2, 3)
 } |> rx(45, %)
 "#;
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens);
         assert_eq!(
             parser.has_pipe_operator(0, None).unwrap(),
@@ -2562,7 +2570,7 @@ const newVar = myVar + 1
   lineTo(2, 3)
 } |> rx(45, %) |> rx(45, %)
 "#;
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens);
         assert_eq!(
             parser.has_pipe_operator(0, None).unwrap(),
@@ -2584,7 +2592,7 @@ const newVar = myVar + 1
 const yo = myFunc(9()
   |> rx(45, %)
 "#;
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens);
         assert_eq!(
             parser.has_pipe_operator(0, None).unwrap(),
@@ -2596,7 +2604,7 @@ const yo = myFunc(9()
         );

         let code = "const myVar2 = 5 + 1 |> myFn(%)";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens);
         assert_eq!(
             parser.has_pipe_operator(1, None).unwrap(),
@@ -2618,7 +2626,7 @@ const yo = myFunc(9()
   lineTo(1,1)
 } |> rx(90, %)
 show(mySk1)"#;
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let token_with_my_path_index = tokens.iter().position(|token| token.value == "myPath").unwrap();
         // loop through getting the token and it's index
@@ -2658,7 +2666,7 @@ show(mySk1)"#;
     #[test]
     fn test_make_member_expression() {
-        let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
+        let tokens = crate::token::lexer("const prop = yo.one[\"two\"]");
         let parser = Parser::new(tokens);
         let member_expression_return = parser.make_member_expression(6).unwrap();
         let member_expression = member_expression_return.expression;
@@ -2700,63 +2708,63 @@ show(mySk1)"#;
     #[test]
     fn test_find_end_of_binary_expression() {
         let code = "1 + 2 * 3\nconst yo = 5";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].value, "3");

         let code = "(1 + 25) / 5 - 3\nconst yo = 5";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].value, "3");
         let index_of_5 = code.find('5').unwrap();
         let end_starting_at_the_5 = parser.find_end_of_binary_expression(index_of_5).unwrap();
         assert_eq!(end_starting_at_the_5, end);

-        // whole thing wraped
+        // whole thing wrapped
         let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].end, code.find("3)").unwrap() + 2);

-        // whole thing wraped but given index after the first brace
+        // whole thing wrapped but given index after the first brace
         let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(1).unwrap();
         assert_eq!(tokens[end].value, "3");

         // given the index of a small wrapped section i.e. `1 + 2` in ((1 + 2) / 5 - 3)'
         let code = "((1 + 2) / 5 - 3)\nconst yo = 5";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(2).unwrap();
         assert_eq!(tokens[end].value, "2");

         // lots of silly nesting
         let code = "(1 + 2) / (5 - (3))\nconst yo = 5";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);

         // with pipe operator at the end
         let code = "(1 + 2) / (5 - (3))\n |> fn(%)";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].end, code.find("))").unwrap() + 2);

         // with call expression at the start of binary expression
         let code = "yo(2) + 3\n |> fn(%)";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].value, "3");

         // with call expression at the end of binary expression
         let code = "3 + yo(2)\n |> fn(%)";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens);
         let _end = parser.find_end_of_binary_expression(0).unwrap();

         // with call expression at the end of binary expression
         let code = "-legX + 2, ";
-        let tokens = crate::tokeniser::lexer(code);
+        let tokens = crate::token::lexer(code);
         let parser = Parser::new(tokens.clone());
         let end = parser.find_end_of_binary_expression(0).unwrap();
         assert_eq!(tokens[end].value, "2");
@@ -2765,7 +2773,7 @@ show(mySk1)"#;
     #[test]
     fn test_make_array_expression() {
         // input_index: 6, output_index: 14, output: {"type":"ArrayExpression","start":11,"end":26,"elements":[{"type":"Literal","start":12,"end":15,"value":"1","raw":"\"1\""},{"type":"Literal","start":17,"end":18,"value":2,"raw":"2"},{"type":"Identifier","start":20,"end":25,"name":"three"}]}
-        let tokens = crate::tokeniser::lexer("const yo = [\"1\", 2, three]");
+        let tokens = crate::token::lexer("const yo = [\"1\", 2, three]");
         let parser = Parser::new(tokens);
         let array_expression = parser.make_array_expression(6).unwrap();
         let expression = array_expression.expression;
@@ -2804,7 +2812,7 @@ show(mySk1)"#;
     #[test]
     fn test_make_call_expression() {
-        let tokens = crate::tokeniser::lexer("foo(\"a\", a, 3)");
+        let tokens = crate::token::lexer("foo(\"a\", a, 3)");
         let parser = Parser::new(tokens);
         let result = parser.make_call_expression(0).unwrap();
         assert_eq!(result.last_index, 9);
@@ -2838,7 +2846,7 @@ show(mySk1)"#;
     #[test]
     fn test_make_variable_declaration() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const yo = startSketch([0, 0])
   |> lineTo([1, myVar], %)
   |> foo(myVar2, %)
@@ -2908,7 +2916,7 @@ show(mySk1)"#;
     #[test]
     fn test_make_body() {
-        let tokens = crate::tokeniser::lexer("const myVar = 5");
+        let tokens = crate::token::lexer("const myVar = 5");
         let parser = Parser::new(tokens);
         let body = parser
             .make_body(
@@ -2926,7 +2934,7 @@ show(mySk1)"#;
     #[test]
     fn test_abstract_syntax_tree() {
         let code = "5 +6";
-        let parser = Parser::new(crate::tokeniser::lexer(code));
+        let parser = Parser::new(crate::token::lexer(code));
         let result = parser.ast().unwrap();
         let expected_result = Program {
             start: 0,
@@ -2964,8 +2972,8 @@ show(mySk1)"#;
     #[test]
     fn test_empty_file() {
         let some_program_string = r#""#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert!(result.err().unwrap().to_string().contains("file is empty"));
@@ -2973,7 +2981,7 @@ show(mySk1)"#;
     #[test]
     fn test_parse_half_pipe_small() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             "const secondExtrude = startSketchAt([0,0])
   |",
         );
@@ -2985,14 +2993,14 @@ show(mySk1)"#;
     #[test]
     fn test_parse_member_expression_double_nested_braces() {
-        let tokens = crate::tokeniser::lexer(r#"const prop = yo["one"][two]"#);
+        let tokens = crate::token::lexer(r#"const prop = yo["one"][two]"#);
         let parser = Parser::new(tokens);
         parser.ast().unwrap();
     }

     #[test]
     fn test_parse_member_expression_binary_expression_period_number_first() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const obj = { a: 1, b: 2 }
 const height = 1 - obj.a"#,
         );
@@ -3002,7 +3010,7 @@ const height = 1 - obj.a"#,
     #[test]
     fn test_parse_member_expression_binary_expression_brace_number_first() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const obj = { a: 1, b: 2 }
 const height = 1 - obj["a"]"#,
         );
@@ -3012,7 +3020,7 @@ const height = 1 - obj["a"]"#,
     #[test]
     fn test_parse_member_expression_binary_expression_brace_number_second() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const obj = { a: 1, b: 2 }
 const height = obj["a"] - 1"#,
         );
@@ -3022,7 +3030,7 @@ const height = obj["a"] - 1"#,
     #[test]
     fn test_parse_member_expression_binary_expression_in_array_number_first() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const obj = { a: 1, b: 2 }
 const height = [1 - obj["a"], 0]"#,
         );
@@ -3032,7 +3040,7 @@ const height = [1 - obj["a"], 0]"#,
     #[test]
     fn test_parse_member_expression_binary_expression_in_array_number_second() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const obj = { a: 1, b: 2 }
 const height = [obj["a"] - 1, 0]"#,
         );
@@ -3042,7 +3050,7 @@ const height = [obj["a"] - 1, 0]"#,
     #[test]
     fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const obj = { a: 1, b: 2 }
 const height = [obj["a"] -1, 0]"#,
         );
@@ -3052,7 +3060,7 @@ const height = [obj["a"] -1, 0]"#,
     #[test]
     fn test_parse_half_pipe() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             "const height = 10

 const firstExtrude = startSketchAt([0,0])
@@ -3075,15 +3083,17 @@ const secondExtrude = startSketchAt([0,0])
     #[test]
     fn test_parse_greater_bang() {
-        let tokens = crate::tokeniser::lexer(">!");
+        let tokens = crate::token::lexer(">!");
         let parser = Parser::new(tokens);
-        let result = parser.ast();
-        assert!(result.is_ok());
+        let err = parser.ast().unwrap_err();
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
+        assert!(err.to_string().contains("file is empty"));
     }

     #[test]
     fn test_parse_z_percent_parens() {
-        let tokens = crate::tokeniser::lexer("z%)");
+        let tokens = crate::token::lexer("z%)");
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
@@ -3092,15 +3102,17 @@ const secondExtrude = startSketchAt([0,0])
     #[test]
     fn test_parse_parens_unicode() {
-        let tokens = crate::tokeniser::lexer("");
+        let tokens = crate::token::lexer("");
         let parser = Parser::new(tokens);
         let result = parser.ast();
-        assert!(result.is_ok());
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
+        assert!(result.is_err());
     }

     #[test]
     fn test_parse_negative_in_array_binary_expression() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"const leg1 = 5
 const thickness = 0.56
@@ -3114,7 +3126,7 @@ const bracket = [-leg2 + thickness, 0]
     #[test]
     fn test_parse_nested_open_brackets() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"
 z(-[["#,
         );
@@ -3129,31 +3141,38 @@ z(-[["#,
     #[test]
     fn test_parse_weird_new_line_function() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"z
 (--#"#,
         );
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
         assert_eq!(
             result.err().unwrap().to_string(),
-            r#"syntax: KclErrorDetails { source_ranges: [SourceRange([0, 1])], message: "missing a closing brace for the function call" }"#
+            r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
         );
     }

     #[test]
     fn test_parse_weird_lots_of_fancy_brackets() {
-        let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
+        let tokens = crate::token::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
-        assert!(result.err().unwrap().to_string().contains("unexpected end"));
+        // TODO: Better errors when program cannot tokenize.
+        // https://github.com/KittyCAD/modeling-app/issues/696
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            r#"semantic: KclErrorDetails { source_ranges: [], message: "file is empty" }"#
+        );
    }

     #[test]
     fn test_parse_weird_close_before_open() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"fn)n
 e
 ["#,
@@ -3170,7 +3189,7 @@ e
     #[test]
     fn test_parse_weird_close_before_nada() {
-        let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
+        let tokens = crate::token::lexer(r#"fn)n-"#);
         let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
@@ -3179,7 +3198,7 @@ e
     #[test]
     fn test_parse_weird_lots_of_slashes() {
-        let tokens = crate::tokeniser::lexer(
+        let tokens = crate::token::lexer(
             r#"J///////////o//+///////////P++++*++++++P///////˟
 ++4"#,
         );
@@ -3196,7 +3215,7 @@ e
     #[test]
     fn test_parse_expand_array() {
         let code = "const myArray = [0..10]";
-        let parser = Parser::new(crate::tokeniser::lexer(code));
+        let parser = Parser::new(crate::token::lexer(code));
         let result = parser.ast().unwrap();
         let expected_result = Program {
             start: 0,
@@ -3299,8 +3318,8 @@ e
     #[test]
     fn test_error_keyword_in_variable() {
         let some_program_string = r#"const let = "thing""#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3312,8 +3331,8 @@ e
     #[test]
     fn test_error_keyword_in_fn_name() {
         let some_program_string = r#"fn let = () {}"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3325,8 +3344,8 @@ e
     #[test]
     fn test_error_stdlib_in_fn_name() {
         let some_program_string = r#"fn cos = () {}"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3340,8 +3359,8 @@ e
         let some_program_string = r#"fn thing = (let) => {
   return 1
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3355,8 +3374,8 @@ e
         let some_program_string = r#"fn thing = (cos) => {
   return 1
 }"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3373,8 +3392,8 @@ e
         }
         firstPrimeNumber()
         "#;
-        let tokens = crate::tokeniser::lexer(program);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(program);
+        let parser = Parser::new(tokens);
         let _ast = parser.ast().unwrap();
     }
@@ -3386,8 +3405,8 @@ e
 thing(false)
 "#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         parser.ast().unwrap();
     }
@@ -3403,8 +3422,8 @@ thing(false)
             "#,
             name
         );
-        let tokens = crate::tokeniser::lexer(&some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(&some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3421,8 +3440,8 @@ thing(false)
     #[test]
     fn test_error_define_var_as_function() {
         let some_program_string = r#"fn thing = "thing""#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         let result = parser.ast();
         assert!(result.is_err());
         assert_eq!(
@@ -3450,8 +3469,8 @@ const pt2 = b2[0]
 show(b1)
 show(b2)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         parser.ast().unwrap();
     }
@@ -3459,18 +3478,36 @@ show(b2)"#;
     fn test_math_with_stdlib() {
         let some_program_string = r#"const d2r = pi() / 2
 let other_thing = 2 * cos(3)"#;
-        let tokens = crate::tokeniser::lexer(some_program_string);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         parser.ast().unwrap();
     }

     #[test]
+    #[ignore] // ignore until more stack fixes
     fn test_parse_pipes_on_pipes() {
         let code = include_str!("../../tests/executor/inputs/pipes_on_pipes.kcl");
-        let tokens = crate::tokeniser::lexer(code);
-        let parser = crate::parser::Parser::new(tokens);
+        let tokens = crate::token::lexer(code);
+        let parser = Parser::new(tokens);
+        parser.ast().unwrap();
+    }
+
+    #[test]
+    fn test_negative_arguments() {
+        let some_program_string = r#"fn box = (p, h, l, w) => {
+ const myBox = startSketchAt(p)
+    |> line([0, l], %)
+    |> line([w, 0], %)
+    |> line([0, -l], %)
+    |> close(%)
+    |> extrude(h, %)
+  return myBox
+}
+let myBox = box([0,0], -3, -16, -10)
+show(myBox)"#;
+        let tokens = crate::token::lexer(some_program_string);
+        let parser = Parser::new(tokens);
         parser.ast().unwrap();
     }
 }

View File

@ -34,7 +34,7 @@ pub struct Backend {
/// The types of tokens the server supports. /// The types of tokens the server supports.
pub token_types: Vec<SemanticTokenType>, pub token_types: Vec<SemanticTokenType>,
/// Token maps. /// Token maps.
pub token_map: DashMap<String, Vec<crate::tokeniser::Token>>, pub token_map: DashMap<String, Vec<crate::token::Token>>,
/// AST maps. /// AST maps.
pub ast_map: DashMap<String, crate::ast::types::Program>, pub ast_map: DashMap<String, crate::ast::types::Program>,
/// Current code. /// Current code.
@ -56,7 +56,7 @@ impl Backend {
// Lets update the tokens. // Lets update the tokens.
self.current_code_map self.current_code_map
.insert(params.uri.to_string(), params.text.clone()); .insert(params.uri.to_string(), params.text.clone());
let tokens = crate::tokeniser::lexer(&params.text); let tokens = crate::token::lexer(&params.text);
self.token_map.insert(params.uri.to_string(), tokens.clone()); self.token_map.insert(params.uri.to_string(), tokens.clone());
// Update the semantic tokens map. // Update the semantic tokens map.
@ -69,9 +69,7 @@ impl Backend {
continue; continue;
}; };
if token.token_type == crate::tokeniser::TokenType::Word if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
&& self.stdlib_completions.contains_key(&token.value)
{
// This is a stdlib function. // This is a stdlib function.
token_type = SemanticTokenType::FUNCTION; token_type = SemanticTokenType::FUNCTION;
} }
@@ -549,7 +547,7 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
-let tokens = crate::tokeniser::lexer(&current_code);
+let tokens = crate::token::lexer(&current_code);
let parser = crate::parser::Parser::new(tokens);
let Ok(ast) = parser.ast() else {
return Ok(None);
@@ -581,7 +579,7 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
-let tokens = crate::tokeniser::lexer(&current_code);
+let tokens = crate::token::lexer(&current_code);
let parser = crate::parser::Parser::new(tokens);
let Ok(mut ast) = parser.ast() else {
return Ok(None);
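The LSP changes are the same rename plus one condition collapsed onto a single line: a Word token is promoted to a FUNCTION semantic token when its text is a known stdlib completion. A simplified, self-contained sketch of that classification step (the HashSet is a stand-in for the server's stdlib_completions map; the function name is illustrative):

use std::collections::HashSet;
use kcl_lib::token::{Token, TokenType};
use tower_lsp::lsp_types::SemanticTokenType;

fn classify(token: &Token, stdlib: &HashSet<String>) -> Option<SemanticTokenType> {
    // Start from the default TokenType -> SemanticTokenType mapping;
    // token types with no LSP equivalent yield None.
    let mut token_type = SemanticTokenType::try_from(token.token_type).ok()?;
    // Promote stdlib words to functions, as the server does above.
    if token.token_type == TokenType::Word && stdlib.contains(&token.value) {
        token_type = SemanticTokenType::FUNCTION;
    }
    Some(token_type)
}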


@@ -856,26 +856,6 @@ async fn inner_arc(data: ArcData, sketch_group: Box<SketchGroup>, args: Args) ->
)
.await?;
// TODO: Dont do this (move path pen) - mike
// lets review what the needs are here and see if any existing arc endpoints can accomplish this
// Move the path pen to the end of the arc.
// Since that is where we want to draw the next path.
// TODO: the engine should automatically move the pen to the end of the arc.
// This just seems inefficient.
args.send_modeling_cmd(
id,
ModelingCmd::MovePathPen {
path: sketch_group.id,
to: Point3D {
x: end.x,
y: end.y,
z: 0.0,
},
},
)
.await?;
let current_path = Path::ToPoint {
base: BasePath {
from: from.into(),
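The deleted block trusted the engine to leave the path pen at the arc's end rather than issuing an explicit MovePathPen. For reference, the end point that call used to target is plain trigonometry on the arc parameters; a sketch of the calculation (the function name is illustrative, not the crate's):

/// Where an arc ends, given its center, radius, and end angle in degrees.
fn arc_end(center: [f64; 2], radius: f64, angle_end_degrees: f64) -> [f64; 2] {
    let theta = angle_end_degrees.to_radians();
    [center[0] + radius * theta.cos(), center[1] + radius * theta.sin()]
}

With angle_start 0 and angle_end 360, as in the close-arc test later in this diff, the arc ends exactly where it began: at [center[0] + radius, center[1]].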


@@ -0,0 +1,173 @@
use std::str::FromStr;
use anyhow::Result;
use parse_display::{Display, FromStr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;
mod tokeniser;
/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
#[display(style = "camelCase")]
pub enum TokenType {
/// A number.
Number,
/// A word.
Word,
/// An operator.
Operator,
/// A string.
String,
/// A keyword.
Keyword,
/// A brace.
Brace,
/// Whitespace.
Whitespace,
/// A comma.
Comma,
/// A colon.
Colon,
/// A period.
Period,
/// A double period: `..`.
DoublePeriod,
/// A line comment.
LineComment,
/// A block comment.
BlockComment,
/// A function name.
Function,
}
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
impl TryFrom<TokenType> for SemanticTokenType {
type Error = anyhow::Error;
fn try_from(token_type: TokenType) -> Result<Self> {
Ok(match token_type {
TokenType::Number => Self::NUMBER,
TokenType::Word => Self::VARIABLE,
TokenType::Keyword => Self::KEYWORD,
TokenType::Operator => Self::OPERATOR,
TokenType::String => Self::STRING,
TokenType::LineComment => Self::COMMENT,
TokenType::BlockComment => Self::COMMENT,
TokenType::Function => Self::FUNCTION,
TokenType::Whitespace
| TokenType::Brace
| TokenType::Comma
| TokenType::Colon
| TokenType::Period
| TokenType::DoublePeriod => {
anyhow::bail!("unsupported token type: {:?}", token_type)
}
})
}
}
impl TokenType {
// This is for the lsp server.
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
let schema = TokenType::json_schema(&mut generator);
let schemars::schema::Schema::Object(o) = &schema else {
anyhow::bail!("expected object schema: {:#?}", schema);
};
let Some(subschemas) = &o.subschemas else {
anyhow::bail!("expected subschemas: {:#?}", schema);
};
let Some(one_ofs) = &subschemas.one_of else {
anyhow::bail!("expected one_of: {:#?}", schema);
};
let mut semantic_tokens = vec![];
for one_of in one_ofs {
let schemars::schema::Schema::Object(o) = one_of else {
anyhow::bail!("expected object one_of: {:#?}", one_of);
};
let Some(enum_values) = o.enum_values.as_ref() else {
anyhow::bail!("expected enum values: {:#?}", o);
};
if enum_values.len() > 1 {
anyhow::bail!("expected only one enum value: {:#?}", o);
}
if enum_values.is_empty() {
anyhow::bail!("expected at least one enum value: {:#?}", o);
}
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
semantic_tokens.push(semantic_token_type);
}
}
Ok(semantic_tokens)
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
#[ts(export)]
pub struct Token {
#[serde(rename = "type")]
pub token_type: TokenType,
/// Offset in the source code where this token begins.
pub start: usize,
/// Offset in the source code where this token ends.
pub end: usize,
pub value: String,
}
impl Token {
pub fn from_range(range: std::ops::Range<usize>, token_type: TokenType, value: String) -> Self {
Self {
start: range.start,
end: range.end,
value,
token_type,
}
}
pub fn is_code_token(&self) -> bool {
!matches!(
self.token_type,
TokenType::Whitespace | TokenType::LineComment | TokenType::BlockComment
)
}
}
impl From<Token> for crate::executor::SourceRange {
fn from(token: Token) -> Self {
Self([token.start, token.end])
}
}
impl From<&Token> for crate::executor::SourceRange {
fn from(token: &Token) -> Self {
Self([token.start, token.end])
}
}
pub fn lexer(s: &str) -> Vec<Token> {
tokeniser::lexer(s).unwrap_or_default()
}
#[cfg(test)]
mod tests {
use super::*;
// We have this as a test so we can ensure it never panics with an unwrap in the server.
#[test]
fn test_token_type_to_semantic_token_type() {
let semantic_types = TokenType::all_semantic_token_types().unwrap();
assert!(!semantic_types.is_empty());
}
}
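With lexer and Token::is_code_token both public through the new module, downstream code can strip trivia in one pass. A small sketch against the API shown above (assumes a dependency on kcl_lib):

use kcl_lib::token::{lexer, Token};

/// Drop whitespace and comments, keeping only tokens the parser cares about.
fn code_tokens(source: &str) -> Vec<Token> {
    lexer(source).into_iter().filter(|t| t.is_code_token()).collect()
}

fn main() {
    let tokens = code_tokens("const a = 5 // the answer");
    // Expect keyword `const`, word `a`, operator `=`, number `5`.
    assert_eq!(tokens.len(), 4);
}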

File diff suppressed because it is too large


@@ -1,749 +0,0 @@
use std::str::FromStr;
use anyhow::Result;
use lazy_static::lazy_static;
use parse_display::{Display, FromStr};
use regex::bytes::Regex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tower_lsp::lsp_types::SemanticTokenType;
/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
#[display(style = "camelCase")]
pub enum TokenType {
/// A number.
Number,
/// A word.
Word,
/// An operator.
Operator,
/// A string.
String,
/// A keyword.
Keyword,
/// A brace.
Brace,
/// Whitespace.
Whitespace,
/// A comma.
Comma,
/// A colon.
Colon,
/// A period.
Period,
/// A double period: `..`.
DoublePeriod,
/// A line comment.
LineComment,
/// A block comment.
BlockComment,
/// A function name.
Function,
}
/// Most KCL tokens correspond to LSP semantic tokens (but not all).
impl TryFrom<TokenType> for SemanticTokenType {
type Error = anyhow::Error;
fn try_from(token_type: TokenType) -> Result<Self> {
Ok(match token_type {
TokenType::Number => Self::NUMBER,
TokenType::Word => Self::VARIABLE,
TokenType::Keyword => Self::KEYWORD,
TokenType::Operator => Self::OPERATOR,
TokenType::String => Self::STRING,
TokenType::LineComment => Self::COMMENT,
TokenType::BlockComment => Self::COMMENT,
TokenType::Function => Self::FUNCTION,
TokenType::Whitespace
| TokenType::Brace
| TokenType::Comma
| TokenType::Colon
| TokenType::Period
| TokenType::DoublePeriod => {
anyhow::bail!("unsupported token type: {:?}", token_type)
}
})
}
}
impl TokenType {
// This is for the lsp server.
pub fn all_semantic_token_types() -> Result<Vec<SemanticTokenType>> {
let mut settings = schemars::gen::SchemaSettings::openapi3();
settings.inline_subschemas = true;
let mut generator = schemars::gen::SchemaGenerator::new(settings);
let schema = TokenType::json_schema(&mut generator);
let schemars::schema::Schema::Object(o) = &schema else {
anyhow::bail!("expected object schema: {:#?}", schema);
};
let Some(subschemas) = &o.subschemas else {
anyhow::bail!("expected subschemas: {:#?}", schema);
};
let Some(one_ofs) = &subschemas.one_of else {
anyhow::bail!("expected one_of: {:#?}", schema);
};
let mut semantic_tokens = vec![];
for one_of in one_ofs {
let schemars::schema::Schema::Object(o) = one_of else {
anyhow::bail!("expected object one_of: {:#?}", one_of);
};
let Some(enum_values) = o.enum_values.as_ref() else {
anyhow::bail!("expected enum values: {:#?}", o);
};
if enum_values.len() > 1 {
anyhow::bail!("expected only one enum value: {:#?}", o);
}
if enum_values.is_empty() {
anyhow::bail!("expected at least one enum value: {:#?}", o);
}
let label = TokenType::from_str(&enum_values[0].to_string().replace('"', ""))?;
if let Ok(semantic_token_type) = SemanticTokenType::try_from(label) {
semantic_tokens.push(semantic_token_type);
}
}
Ok(semantic_tokens)
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone, ts_rs::TS)]
#[ts(export)]
pub struct Token {
#[serde(rename = "type")]
pub token_type: TokenType,
/// Offset in the source code where this token begins.
pub start: usize,
/// Offset in the source code where this token ends.
pub end: usize,
pub value: String,
}
impl From<Token> for crate::executor::SourceRange {
fn from(token: Token) -> Self {
Self([token.start, token.end])
}
}
impl From<&Token> for crate::executor::SourceRange {
fn from(token: &Token) -> Self {
Self([token.start, token.end])
}
}
lazy_static! {
static ref NUMBER: Regex = Regex::new(r"^(\d+(\.\d*)?|\.\d+)\b").unwrap();
static ref WHITESPACE: Regex = Regex::new(r"\s+").unwrap();
static ref WORD: Regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*").unwrap();
// TODO: these should be generated using our struct types for these.
static ref KEYWORD: Regex =
Regex::new(r"^(if|else|for|while|return|break|continue|fn|let|mut|loop|true|false|nil|and|or|not|var|const)\b").unwrap();
static ref OPERATOR: Regex = Regex::new(r"^(>=|<=|==|=>|!= |\|>|\*|\+|-|/|%|=|<|>|\||\^)").unwrap();
static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
static ref BLOCK_START: Regex = Regex::new(r"^\{").unwrap();
static ref BLOCK_END: Regex = Regex::new(r"^\}").unwrap();
static ref PARAN_START: Regex = Regex::new(r"^\(").unwrap();
static ref PARAN_END: Regex = Regex::new(r"^\)").unwrap();
static ref ARRAY_START: Regex = Regex::new(r"^\[").unwrap();
static ref ARRAY_END: Regex = Regex::new(r"^\]").unwrap();
static ref COMMA: Regex = Regex::new(r"^,").unwrap();
static ref COLON: Regex = Regex::new(r"^:").unwrap();
static ref PERIOD: Regex = Regex::new(r"^\.").unwrap();
static ref DOUBLE_PERIOD: Regex = Regex::new(r"^\.\.").unwrap();
static ref LINECOMMENT: Regex = Regex::new(r"^//.*").unwrap();
static ref BLOCKCOMMENT: Regex = Regex::new(r"^/\*[\s\S]*?\*/").unwrap();
}
fn is_number(character: &[u8]) -> bool {
NUMBER.is_match(character)
}
fn is_whitespace(character: &[u8]) -> bool {
WHITESPACE.is_match(character)
}
fn is_word(character: &[u8]) -> bool {
WORD.is_match(character)
}
fn is_keyword(character: &[u8]) -> bool {
KEYWORD.is_match(character)
}
fn is_string(character: &[u8]) -> bool {
match STRING.find(character) {
Some(m) => m.start() == 0,
None => false,
}
}
fn is_operator(character: &[u8]) -> bool {
OPERATOR.is_match(character)
}
fn is_block_start(character: &[u8]) -> bool {
BLOCK_START.is_match(character)
}
fn is_block_end(character: &[u8]) -> bool {
BLOCK_END.is_match(character)
}
fn is_paren_start(character: &[u8]) -> bool {
PARAN_START.is_match(character)
}
fn is_paren_end(character: &[u8]) -> bool {
PARAN_END.is_match(character)
}
fn is_array_start(character: &[u8]) -> bool {
ARRAY_START.is_match(character)
}
fn is_array_end(character: &[u8]) -> bool {
ARRAY_END.is_match(character)
}
fn is_comma(character: &[u8]) -> bool {
COMMA.is_match(character)
}
fn is_colon(character: &[u8]) -> bool {
COLON.is_match(character)
}
fn is_double_period(character: &[u8]) -> bool {
DOUBLE_PERIOD.is_match(character)
}
fn is_period(character: &[u8]) -> bool {
PERIOD.is_match(character)
}
fn is_line_comment(character: &[u8]) -> bool {
LINECOMMENT.is_match(character)
}
fn is_block_comment(character: &[u8]) -> bool {
BLOCKCOMMENT.is_match(character)
}
fn match_first(s: &[u8], regex: &Regex) -> Option<String> {
regex
.find(s)
.map(|the_match| String::from_utf8_lossy(the_match.as_bytes()).into())
}
fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
Token {
token_type,
value: value.to_string(),
start,
end: start + value.len(),
}
}
fn return_token_at_index(str_from_index: &[u8], start_index: usize) -> Option<Token> {
if is_string(str_from_index) {
return Some(make_token(
TokenType::String,
&match_first(str_from_index, &STRING)?,
start_index,
));
}
let is_line_comment_bool = is_line_comment(str_from_index);
if is_line_comment_bool || is_block_comment(str_from_index) {
return Some(make_token(
if is_line_comment_bool {
TokenType::LineComment
} else {
TokenType::BlockComment
},
&match_first(
str_from_index,
if is_line_comment_bool {
&LINECOMMENT
} else {
&BLOCKCOMMENT
},
)?,
start_index,
));
}
if is_paren_end(str_from_index) {
return Some(make_token(
TokenType::Brace,
&match_first(str_from_index, &PARAN_END)?,
start_index,
));
}
if is_paren_start(str_from_index) {
return Some(make_token(
TokenType::Brace,
&match_first(str_from_index, &PARAN_START)?,
start_index,
));
}
if is_block_start(str_from_index) {
return Some(make_token(
TokenType::Brace,
&match_first(str_from_index, &BLOCK_START)?,
start_index,
));
}
if is_block_end(str_from_index) {
return Some(make_token(
TokenType::Brace,
&match_first(str_from_index, &BLOCK_END)?,
start_index,
));
}
if is_array_start(str_from_index) {
return Some(make_token(
TokenType::Brace,
&match_first(str_from_index, &ARRAY_START)?,
start_index,
));
}
if is_array_end(str_from_index) {
return Some(make_token(
TokenType::Brace,
&match_first(str_from_index, &ARRAY_END)?,
start_index,
));
}
if is_comma(str_from_index) {
return Some(make_token(
TokenType::Comma,
&match_first(str_from_index, &COMMA)?,
start_index,
));
}
if is_operator(str_from_index) {
return Some(make_token(
TokenType::Operator,
&match_first(str_from_index, &OPERATOR)?,
start_index,
));
}
if is_number(str_from_index) {
return Some(make_token(
TokenType::Number,
&match_first(str_from_index, &NUMBER)?,
start_index,
));
}
if is_keyword(str_from_index) {
return Some(make_token(
TokenType::Keyword,
&match_first(str_from_index, &KEYWORD)?,
start_index,
));
}
if is_word(str_from_index) {
return Some(make_token(
TokenType::Word,
&match_first(str_from_index, &WORD)?,
start_index,
));
}
if is_colon(str_from_index) {
return Some(make_token(
TokenType::Colon,
&match_first(str_from_index, &COLON)?,
start_index,
));
}
if is_double_period(str_from_index) {
return Some(make_token(
TokenType::DoublePeriod,
&match_first(str_from_index, &DOUBLE_PERIOD)?,
start_index,
));
}
if is_period(str_from_index) {
return Some(make_token(
TokenType::Period,
&match_first(str_from_index, &PERIOD)?,
start_index,
));
}
if is_whitespace(str_from_index) {
return Some(make_token(
TokenType::Whitespace,
&match_first(str_from_index, &WHITESPACE)?,
start_index,
));
}
None
}
pub fn lexer(s: &str) -> Vec<Token> {
let mut current_index = 0;
let mut tokens = Vec::new();
let n = s.len();
let b = s.as_bytes();
while current_index < n {
let token = return_token_at_index(&b[current_index..], current_index);
let Some(token) = token else {
current_index += 1;
continue;
};
let token_length = token.value.len();
tokens.push(token);
current_index += token_length;
}
tokens
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use super::*;
#[test]
fn is_number_test() {
assert!(is_number("1".as_bytes()));
assert!(is_number("1 abc".as_bytes()));
assert!(is_number("1.1".as_bytes()));
assert!(is_number("1.1 abc".as_bytes()));
assert!(!is_number("a".as_bytes()));
assert!(is_number("1".as_bytes()));
assert!(is_number(".1".as_bytes()));
assert!(is_number("5?".as_bytes()));
assert!(is_number("5 + 6".as_bytes()));
assert!(is_number("5 + a".as_bytes()));
assert!(is_number("5.5".as_bytes()));
assert!(!is_number("1abc".as_bytes()));
assert!(!is_number("a".as_bytes()));
assert!(!is_number("?".as_bytes()));
assert!(!is_number("?5".as_bytes()));
}
#[test]
fn is_whitespace_test() {
assert!(is_whitespace(" ".as_bytes()));
assert!(is_whitespace(" ".as_bytes()));
assert!(is_whitespace(" a".as_bytes()));
assert!(is_whitespace("a ".as_bytes()));
assert!(!is_whitespace("a".as_bytes()));
assert!(!is_whitespace("?".as_bytes()));
}
#[test]
fn is_word_test() {
assert!(is_word("a".as_bytes()));
assert!(is_word("a ".as_bytes()));
assert!(is_word("a5".as_bytes()));
assert!(is_word("a5a".as_bytes()));
assert!(!is_word("5".as_bytes()));
assert!(!is_word("5a".as_bytes()));
assert!(!is_word("5a5".as_bytes()));
}
#[test]
fn is_string_test() {
assert!(is_string("\"\"".as_bytes()));
assert!(is_string("\"a\"".as_bytes()));
assert!(is_string("\"a\" ".as_bytes()));
assert!(is_string("\"a\"5".as_bytes()));
assert!(is_string("'a'5".as_bytes()));
assert!(is_string("\"with escaped \\\" backslash\"".as_bytes()));
assert!(!is_string("\"".as_bytes()));
assert!(!is_string("\"a".as_bytes()));
assert!(!is_string("a\"".as_bytes()));
assert!(!is_string(" \"a\"".as_bytes()));
assert!(!is_string("5\"a\"".as_bytes()));
assert!(!is_string("a + 'str'".as_bytes()));
assert!(is_string("'c'".as_bytes()));
}
#[test]
fn is_operator_test() {
assert!(is_operator("+".as_bytes()));
assert!(is_operator("+ ".as_bytes()));
assert!(is_operator("-".as_bytes()));
assert!(is_operator("<=".as_bytes()));
assert!(is_operator("<= ".as_bytes()));
assert!(is_operator(">=".as_bytes()));
assert!(is_operator(">= ".as_bytes()));
assert!(is_operator("> ".as_bytes()));
assert!(is_operator("< ".as_bytes()));
assert!(is_operator("| ".as_bytes()));
assert!(is_operator("|> ".as_bytes()));
assert!(is_operator("^ ".as_bytes()));
assert!(is_operator("% ".as_bytes()));
assert!(is_operator("+* ".as_bytes()));
assert!(!is_operator("5 + 5".as_bytes()));
assert!(!is_operator("a".as_bytes()));
assert!(!is_operator("a+".as_bytes()));
assert!(!is_operator("a+5".as_bytes()));
assert!(!is_operator("5a+5".as_bytes()));
assert!(!is_operator(", newVar".as_bytes()));
assert!(!is_operator(",".as_bytes()));
}
#[test]
fn is_block_start_test() {
assert!(is_block_start("{".as_bytes()));
assert!(is_block_start("{ ".as_bytes()));
assert!(is_block_start("{5".as_bytes()));
assert!(is_block_start("{a".as_bytes()));
assert!(is_block_start("{5 ".as_bytes()));
assert!(!is_block_start("5".as_bytes()));
assert!(!is_block_start("5 + 5".as_bytes()));
assert!(!is_block_start("5{ + 5".as_bytes()));
assert!(!is_block_start("a{ + 5".as_bytes()));
assert!(!is_block_start(" { + 5".as_bytes()));
}
#[test]
fn is_block_end_test() {
assert!(is_block_end("}".as_bytes()));
assert!(is_block_end("} ".as_bytes()));
assert!(is_block_end("}5".as_bytes()));
assert!(is_block_end("}5 ".as_bytes()));
assert!(!is_block_end("5".as_bytes()));
assert!(!is_block_end("5 + 5".as_bytes()));
assert!(!is_block_end("5} + 5".as_bytes()));
assert!(!is_block_end(" } + 5".as_bytes()));
}
#[test]
fn is_paren_start_test() {
assert!(is_paren_start("(".as_bytes()));
assert!(is_paren_start("( ".as_bytes()));
assert!(is_paren_start("(5".as_bytes()));
assert!(is_paren_start("(5 ".as_bytes()));
assert!(is_paren_start("(5 + 5".as_bytes()));
assert!(is_paren_start("(5 + 5)".as_bytes()));
assert!(is_paren_start("(5 + 5) ".as_bytes()));
assert!(!is_paren_start("5".as_bytes()));
assert!(!is_paren_start("5 + 5".as_bytes()));
assert!(!is_paren_start("5( + 5)".as_bytes()));
assert!(!is_paren_start(" ( + 5)".as_bytes()));
}
#[test]
fn is_paren_end_test() {
assert!(is_paren_end(")".as_bytes()));
assert!(is_paren_end(") ".as_bytes()));
assert!(is_paren_end(")5".as_bytes()));
assert!(is_paren_end(")5 ".as_bytes()));
assert!(!is_paren_end("5".as_bytes()));
assert!(!is_paren_end("5 + 5".as_bytes()));
assert!(!is_paren_end("5) + 5".as_bytes()));
assert!(!is_paren_end(" ) + 5".as_bytes()));
}
#[test]
fn is_comma_test() {
assert!(is_comma(",".as_bytes()));
assert!(is_comma(", ".as_bytes()));
assert!(is_comma(",5".as_bytes()));
assert!(is_comma(",5 ".as_bytes()));
assert!(!is_comma("5".as_bytes()));
assert!(!is_comma("5 + 5".as_bytes()));
assert!(!is_comma("5, + 5".as_bytes()));
assert!(!is_comma(" , + 5".as_bytes()));
}
#[test]
fn is_line_comment_test() {
assert!(is_line_comment("//".as_bytes()));
assert!(is_line_comment("// ".as_bytes()));
assert!(is_line_comment("//5".as_bytes()));
assert!(is_line_comment("//5 ".as_bytes()));
assert!(!is_line_comment("5".as_bytes()));
assert!(!is_line_comment("5 + 5".as_bytes()));
assert!(!is_line_comment("5// + 5".as_bytes()));
assert!(!is_line_comment(" // + 5".as_bytes()));
}
#[test]
fn is_block_comment_test() {
assert!(is_block_comment("/* */".as_bytes()));
assert!(is_block_comment("/***/".as_bytes()));
assert!(is_block_comment("/*5*/".as_bytes()));
assert!(is_block_comment("/*5 */".as_bytes()));
assert!(!is_block_comment("/*".as_bytes()));
assert!(!is_block_comment("5".as_bytes()));
assert!(!is_block_comment("5 + 5".as_bytes()));
assert!(!is_block_comment("5/* + 5".as_bytes()));
assert!(!is_block_comment(" /* + 5".as_bytes()));
assert!(!is_block_comment(
r#" /* and
here
*/
"#
.as_bytes()
));
}
#[test]
fn make_token_test() {
assert_eq!(
make_token(TokenType::Keyword, "const", 56),
Token {
token_type: TokenType::Keyword,
value: "const".to_string(),
start: 56,
end: 61,
}
);
}
#[test]
fn return_token_at_index_test() {
assert_eq!(
return_token_at_index("const".as_bytes(), 0),
Some(Token {
token_type: TokenType::Keyword,
value: "const".to_string(),
start: 0,
end: 5,
})
);
assert_eq!(
return_token_at_index("4554".as_bytes(), 2),
Some(Token {
token_type: TokenType::Number,
value: "4554".to_string(),
start: 2,
end: 6,
})
);
}
#[test]
fn lexer_test() {
assert_eq!(
lexer("const a=5"),
vec![
Token {
token_type: TokenType::Keyword,
value: "const".to_string(),
start: 0,
end: 5,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 5,
end: 6,
},
Token {
token_type: TokenType::Word,
value: "a".to_string(),
start: 6,
end: 7,
},
Token {
token_type: TokenType::Operator,
value: "=".to_string(),
start: 7,
end: 8,
},
Token {
token_type: TokenType::Number,
value: "5".to_string(),
start: 8,
end: 9,
},
]
);
assert_eq!(
lexer("54 + 22500 + 6"),
vec![
Token {
token_type: TokenType::Number,
value: "54".to_string(),
start: 0,
end: 2,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 2,
end: 3,
},
Token {
token_type: TokenType::Operator,
value: "+".to_string(),
start: 3,
end: 4,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 4,
end: 5,
},
Token {
token_type: TokenType::Number,
value: "22500".to_string(),
start: 5,
end: 10,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 10,
end: 11,
},
Token {
token_type: TokenType::Operator,
value: "+".to_string(),
start: 11,
end: 12,
},
Token {
token_type: TokenType::Whitespace,
value: " ".to_string(),
start: 12,
end: 13,
},
Token {
token_type: TokenType::Number,
value: "6".to_string(),
start: 13,
end: 14,
},
]
);
}
// We have this as a test so we can ensure it never panics with an unwrap in the server.
#[test]
fn test_token_type_to_semantic_token_type() {
let semantic_types = TokenType::all_semantic_token_types().unwrap();
assert!(!semantic_types.is_empty());
}
#[test]
fn test_lexer_negative_word() {
assert_eq!(
lexer("-legX"),
vec![
Token {
token_type: TokenType::Operator,
value: "-".to_string(),
start: 0,
end: 1,
},
Token {
token_type: TokenType::Word,
value: "legX".to_string(),
start: 1,
end: 5,
},
]
);
}
}
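One behavioral difference worth flagging as this file goes away: the old driver loop silently skipped any byte it could not tokenize (current_index += 1 on a miss), while the new public wrapper, token::lexer, maps a failed parse to an empty token list via unwrap_or_default(). A generic illustration of the old recovery strategy in isolation (a standalone sketch, not the crate's code):

/// Old-style lexing loop: on a miss, step one byte forward and keep going.
fn lex_with_skip<T>(input: &[u8], mut next: impl FnMut(&[u8], usize) -> Option<(T, usize)>) -> Vec<T> {
    let mut tokens = Vec::new();
    let mut i = 0;
    while i < input.len() {
        match next(&input[i..], i) {
            // A token matched: keep it and jump past its bytes.
            Some((tok, len)) => {
                tokens.push(tok);
                i += len;
            }
            // No match: silently skip this byte, as the old lexer did.
            None => i += 1,
        }
    }
    tokens
}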


@@ -84,13 +84,13 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
// test for this function and by extension lexer are done in javascript land src/lang/tokeniser.test.ts
#[wasm_bindgen]
pub fn lexer_js(js: &str) -> Result<JsValue, JsError> {
-let tokens = kcl_lib::tokeniser::lexer(js);
+let tokens = kcl_lib::token::lexer(js);
Ok(JsValue::from_serde(&tokens)?)
}
#[wasm_bindgen]
pub fn parse_js(js: &str) -> Result<JsValue, String> {
-let tokens = kcl_lib::tokeniser::lexer(js);
+let tokens = kcl_lib::token::lexer(js);
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast().map_err(String::from)?;
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
@@ -149,7 +149,7 @@ pub async fn lsp_run(config: ServerConfig) -> Result<(), JsValue> {
let stdlib_signatures = get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
// We can unwrap here because we know the tokeniser is valid, since
// we have a test for it.
-let token_types = kcl_lib::tokeniser::TokenType::all_semantic_token_types().unwrap();
+let token_types = kcl_lib::token::TokenType::all_semantic_token_types().unwrap();
let (service, socket) = LspService::new(|client| Backend {
client,
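The unwrap here stays safe for the reason the comment gives: test_token_type_to_semantic_token_type, in the new token module above, exercises all_semantic_token_types in CI. A sketch of the call as a consumer would write it, assuming kcl_lib as a dependency:

fn main() -> anyhow::Result<()> {
    // Enumerate every LSP semantic token type the KCL lexer can emit.
    let token_types = kcl_lib::token::TokenType::all_semantic_token_types()?;
    for t in &token_types {
        println!("{:?}", t);
    }
    Ok(())
}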


@@ -306,5 +306,5 @@ const svg = startSketchAt([0, 0])
|> lineTo([13.44, -10.92], %) // HorizontalLineRelative
|> lineTo([13.44, -13.44], %) // VerticalLineHorizonal
|> lineTo([14.28, -13.44], %) // HorizontalLineRelative
-|> close(%);
+|> close(%)
-show(svg);
+show(svg)


@@ -466,5 +466,5 @@ const svg = startSketchAt([0, 0])
|> bezierCurve({ control1: [-4, -3], control2: [-2.66, -3.67], to: [-3.32, -3.34] }, %) // CubicBezierAbsolute
|> bezierCurve({ control1: [0, -2], control2: [-2.68, -2.67], to: [-1.36, -2.34] }, %) // CubicBezierAbsolute
|> bezierCurve({ control1: [0, -0], control2: [0, -1.34], to: [0, -0.68] }, %) // CubicBezierAbsolute
-|> close(%);
+|> close(%)
-show(svg);
+show(svg)


@@ -32,7 +32,7 @@ async fn execute_and_snapshot(code: &str) -> Result<image::DynamicImage> {
// Create a temporary file to write the output to.
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
-let tokens = kcl_lib::tokeniser::lexer(code);
+let tokens = kcl_lib::token::lexer(code);
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let mut mem: kcl_lib::executor::ProgramMemory = Default::default();
@@ -210,3 +210,45 @@ show(b2)"#;
1.0,
);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_close_arc() {
let code = r#"const center = [0,0]
const radius = 40
const height = 3
const body = startSketchAt([center[0]+radius, center[1]])
|> arc({angle_end: 360, angle_start: 0, radius: radius}, %)
|> close(%)
|> extrude(height, %)
show(body)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/close_arc.png", &result, 1.0);
}
#[tokio::test(flavor = "multi_thread")]
async fn test_negative_args() {
let code = r#"const width = 5
const height = 10
const length = 12
fn box = (sk1, sk2, scale) => {
const boxSketch = startSketchAt([sk1, sk2])
|> line([0, scale], %)
|> line([scale, 0], %)
|> line([0, -scale], %)
|> close(%)
|> extrude(scale, %)
return boxSketch
}
box(0, 0, 5)
box(10, 23, 8)
let thing = box(-12, -15, 10)
box(-20, -5, 10)"#;
let result = execute_and_snapshot(code).await.unwrap();
twenty_twenty::assert_image("tests/executor/outputs/negative_args.png", &result, 1.0);
}
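test_negative_args exercises negative arguments to a user-defined function: lexically, -scale is an operator token followed by a word token (the behavior the old test_lexer_negative_word pinned down), which the parser then folds into a negative argument. A quick check against the public lexer, assuming the new tokenizer keeps that tokenization:

use kcl_lib::token::{lexer, TokenType};

fn main() {
    let tokens = lexer("-scale");
    assert_eq!(tokens[0].token_type, TokenType::Operator);
    assert_eq!(tokens[0].value, "-");
    assert_eq!(tokens[1].token_type, TokenType::Word);
    assert_eq!(tokens[1].value, "scale");
}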

Binary file not shown.


Binary file not shown.



@@ -33,7 +33,7 @@ async fn setup(code: &str, name: &str) -> Result<(EngineConnection, Program, uui
.commands_ws(None, None, None, None, Some(false))
.await?;
-let tokens = kcl_lib::tokeniser::lexer(code);
+let tokens = kcl_lib::token::lexer(code);
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let mut mem: kcl_lib::executor::ProgramMemory = Default::default();


@@ -1530,10 +1530,10 @@
resolved "https://registry.yarnpkg.com/@juggle/resize-observer/-/resize-observer-3.4.0.tgz#08d6c5e20cf7e4cc02fd181c4b0c225cd31dbb60"
integrity sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==
-"@kittycad/lib@^0.0.38":
+"@kittycad/lib@^0.0.39":
-version "0.0.38"
+version "0.0.39"
-resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.38.tgz#50474266f679990bd414c30f884f2d42a0d5dba9"
+resolved "https://registry.yarnpkg.com/@kittycad/lib/-/lib-0.0.39.tgz#e548acf5ff7d45a1f1ec9ad2c61ddcfc30d159b7"
-integrity sha512-Lv9P7jqVRoGgOnCsRCsG8OwZH5n3scxXYrElR+5/Rsd6/KIarLB4bSBngJrXebOnmTw5md0OPeY+b3ZDbZFDeg==
+integrity sha512-cB4wNjsKTMpJUn/kMK3qtkVAqB1csSglqThe+bj02nC1kWTB1XgYxksooc/Gzl1MoK1/n0OPQcbOb7Tojb836A==
dependencies:
node-fetch "3.3.2"
openapi-types "^12.0.0"
@@ -1888,11 +1888,6 @@
resolved "https://registry.yarnpkg.com/@types/debounce-promise/-/debounce-promise-3.1.6.tgz#873e838574011095ed0debf73eed3538e1261d75"
integrity sha512-DowqK95aku+OxMCeG2EQSeXeGeE8OCwLpMsUfIbP7hMF8Otj8eQXnzpwdtIKV+UqQBtkMcF6vbi4Otbh8P/wmg==
"@types/debounce@^1.2.1":
version "1.2.1"
resolved "https://registry.yarnpkg.com/@types/debounce/-/debounce-1.2.1.tgz#79b65710bc8b6d44094d286aecf38e44f9627852"
integrity sha512-epMsEE85fi4lfmJUH/89/iV/LI+F5CvNIvmgs5g5jYFPfhO2S/ae8WSsLOKWdwtoaZw9Q2IhJ4tQ5tFCcS/4HA==
"@types/eslint@^8.4.5": "@types/eslint@^8.4.5":
version "8.44.1" version "8.44.1"
resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.1.tgz#d1811559bb6bcd1a76009e3f7883034b78a0415e" resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-8.44.1.tgz#d1811559bb6bcd1a76009e3f7883034b78a0415e"