fixes obj.thing stuff and member expressions and bugs (#461)
* add failing tests for loops
* fix obj["thing"] and obj.thing complex cases
* fix clippy
* remove println
* fix test
* fixes more tests
* fixups
* fixups
* fix tests
* add more tests
* more test fixes
* fixes
* last test fix

Signed-off-by: Jess Frazelle <github@jessfraz.com>
@@ -11173,9 +11173,6 @@
 },
 "to": {
 "description": "The to point.",
-"anyOf": [
-{
-"description": "A point.",
 "type": "array",
 "items": {
 "type": "number",
@@ -11183,12 +11180,6 @@
 },
 "maxItems": 2,
 "minItems": 2
-},
-{
-"description": "A string like `default`.",
-"type": "string"
-}
-]
 }
 }
 },
@@ -11201,10 +11192,6 @@
 },
 "maxItems": 2,
 "minItems": 2
-},
-{
-"description": "A string like `default`.",
-"type": "string"
-}
 }
 ]
 },
@@ -15341,9 +15328,6 @@
 },
 "to": {
 "description": "The to point.",
-"anyOf": [
-{
-"description": "A point.",
 "type": "array",
 "items": {
 "type": "number",
@@ -15351,12 +15335,6 @@
 },
 "maxItems": 2,
 "minItems": 2
-},
-{
-"description": "A string like `default`.",
-"type": "string"
-}
-]
 }
 }
 },
@@ -15369,10 +15347,6 @@
 },
 "maxItems": 2,
 "minItems": 2
-},
-{
-"description": "A string like `default`.",
-"type": "string"
-}
 }
 ]
 },
docs/kcl.md (12 changed lines)
@@ -2044,11 +2044,9 @@ line(data: LineData, sketch_group: SketchGroup) -> SketchGroup
 // The tag.
 tag: string,
 // The to point.
-to: [number] |
-string,
+to: [number],
 } |
-[number] |
-string
+[number]
 ```
 * `sketch_group`: `SketchGroup` - A sketch group is a collection of paths.
 ```
@@ -2784,11 +2782,9 @@ startSketchAt(data: LineData) -> SketchGroup
 // The tag.
 tag: string,
 // The to point.
-to: [number] |
-string,
+to: [number],
 } |
-[number] |
-string
+[number]
 ```

 #### Returns
@@ -1,5 +1,5 @@
 import { parser_wasm } from './abstractSyntaxTree'
-import { KCLUnexpectedError } from './errors'
+import { KCLError } from './errors'
 import { initPromise } from './rust'

 beforeAll(() => initPromise)
@@ -1744,6 +1744,12 @@ describe('parsing errors', () => {
 _theError = e
 }
 const theError = _theError as any
-expect(theError).toEqual(new KCLUnexpectedError('Brace', [[29, 30]]))
+expect(theError).toEqual(
+new KCLError(
+'unexpected',
+'Unexpected token Token { token_type: Brace, start: 29, end: 30, value: "}" }',
+[[29, 30]]
+)
+)
 })
 })
@@ -27,6 +27,7 @@ import {
 getFirstArg,
 createFirstArg,
 } from './std/sketch'
+import { isLiteralArrayOrStatic } from './std/sketchcombos'

 export function addStartSketch(
 node: Program,
@@ -191,7 +192,7 @@ export function mutateArrExp(
 ): boolean {
 if (node.type === 'ArrayExpression') {
 node.elements.forEach((element, i) => {
-if (element.type === 'Literal') {
+if (isLiteralArrayOrStatic(element)) {
 node.elements[i] = updateWith.elements[i]
 }
 })
@@ -209,8 +210,8 @@ export function mutateObjExpProp(
 const keyIndex = node.properties.findIndex((a) => a.key.name === key)
 if (keyIndex !== -1) {
 if (
-updateWith.type === 'Literal' &&
-node.properties[keyIndex].value.type === 'Literal'
+isLiteralArrayOrStatic(updateWith) &&
+isLiteralArrayOrStatic(node.properties[keyIndex].value)
 ) {
 node.properties[keyIndex].value = updateWith
 return true
@@ -220,7 +221,7 @@ export function mutateObjExpProp(
 ) {
 const arrExp = node.properties[keyIndex].value as ArrayExpression
 arrExp.elements.forEach((element, i) => {
-if (element.type === 'Literal') {
+if (isLiteralArrayOrStatic(element)) {
 arrExp.elements[i] = updateWith.elements[i]
 }
 })
@@ -20,6 +20,7 @@ import {
 getNodeFromPathCurry,
 getNodePathFromSourceRange,
 } from '../queryAst'
+import { isLiteralArrayOrStatic } from './sketchcombos'
 import { GuiModes, toolTips, TooTip } from '../../useStore'
 import { createPipeExpression, splitPathAtPipeExpression } from '../modifyAst'
 import { generateUuidFromHashSeed } from '../../lib/uuid'
@@ -294,7 +295,7 @@ export const xLineTo: SketchLineHelper = {
 pathToNode
 )
 const newX = createLiteral(roundOff(to[0], 2))
-if (callExpression.arguments?.[0]?.type === 'Literal') {
+if (isLiteralArrayOrStatic(callExpression.arguments?.[0])) {
 callExpression.arguments[0] = newX
 } else {
 mutateObjExpProp(callExpression.arguments?.[0], newX, 'to')
@@ -342,7 +343,7 @@ export const yLineTo: SketchLineHelper = {
 pathToNode
 )
 const newY = createLiteral(roundOff(to[1], 2))
-if (callExpression.arguments?.[0]?.type === 'Literal') {
+if (isLiteralArrayOrStatic(callExpression.arguments?.[0])) {
 callExpression.arguments[0] = newY
 } else {
 mutateObjExpProp(callExpression.arguments?.[0], newY, 'to')
@@ -392,7 +393,7 @@ export const xLine: SketchLineHelper = {
 pathToNode
 )
 const newX = createLiteral(roundOff(to[0] - from[0], 2))
-if (callExpression.arguments?.[0]?.type === 'Literal') {
+if (isLiteralArrayOrStatic(callExpression.arguments?.[0])) {
 callExpression.arguments[0] = newX
 } else {
 mutateObjExpProp(callExpression.arguments?.[0], newX, 'length')
@@ -436,7 +437,7 @@ export const yLine: SketchLineHelper = {
 pathToNode
 )
 const newY = createLiteral(roundOff(to[1] - from[1], 2))
-if (callExpression.arguments?.[0]?.type === 'Literal') {
+if (isLiteralArrayOrStatic(callExpression.arguments?.[0])) {
 callExpression.arguments[0] = newY
 } else {
 mutateObjExpProp(callExpression.arguments?.[0], newY, 'length')
@@ -1036,10 +1037,11 @@ export function addTagForSketchOnFace(

 function isAngleLiteral(lineArugement: Value): boolean {
 return lineArugement?.type === 'ArrayExpression'
-? lineArugement.elements[0].type === 'Literal'
+? isLiteralArrayOrStatic(lineArugement.elements[0])
 : lineArugement?.type === 'ObjectExpression'
-? lineArugement.properties.find(({ key }) => key.name === 'angle')?.value
-.type === 'Literal'
+? isLiteralArrayOrStatic(
+lineArugement.properties.find(({ key }) => key.name === 'angle')?.value
+)
 : false
 }

@@ -1137,27 +1137,18 @@ export function getRemoveConstraintsTransform(

 // check if the function is locked down and so can't be transformed
 const firstArg = getFirstArg(sketchFnExp)
-if (Array.isArray(firstArg.val)) {
-const [a, b] = firstArg.val
-if (a?.type !== 'Literal' || b?.type !== 'Literal') {
+if (isNotLiteralArrayOrStatic(firstArg.val)) {
 return transformInfo
 }
-} else {
-if (firstArg.val?.type !== 'Literal') {
-return transformInfo
-}
-}

 // check if the function has no constraints
 const isTwoValFree =
-Array.isArray(firstArg.val) &&
-firstArg.val?.[0]?.type === 'Literal' &&
-firstArg.val?.[1]?.type === 'Literal'
+Array.isArray(firstArg.val) && isLiteralArrayOrStatic(firstArg.val)
 if (isTwoValFree) {
 return false
 }
 const isOneValFree =
-!Array.isArray(firstArg.val) && firstArg.val?.type === 'Literal'
+!Array.isArray(firstArg.val) && isLiteralArrayOrStatic(firstArg.val)
 if (isOneValFree) {
 return transformInfo
 }
@@ -1188,25 +1179,12 @@ function getTransformMapPath(

 // check if the function is locked down and so can't be transformed
 const firstArg = getFirstArg(sketchFnExp)
-if (Array.isArray(firstArg.val)) {
-const [a, b] = firstArg.val
-if (a?.type !== 'Literal' && b?.type !== 'Literal') {
+if (isNotLiteralArrayOrStatic(firstArg.val)) {
 return false
 }
-} else {
-if (firstArg.val?.type !== 'Literal') {
-return false
-}
-}

 // check if the function has no constraints
-const isTwoValFree =
-Array.isArray(firstArg.val) &&
-firstArg.val?.[0]?.type === 'Literal' &&
-firstArg.val?.[1]?.type === 'Literal'
-const isOneValFree =
-!Array.isArray(firstArg.val) && firstArg.val?.type === 'Literal'
-if (isTwoValFree || isOneValFree) {
+if (isLiteralArrayOrStatic(firstArg.val)) {
 const info = transformMap?.[name]?.free?.[constraintType]
 if (info)
 return {
@@ -1260,7 +1238,7 @@ export function getConstraintType(
 if (fnName === 'xLineTo') return 'yAbsolute'
 if (fnName === 'yLineTo') return 'xAbsolute'
 } else {
-const isFirstArgLockedDown = val?.[0]?.type !== 'Literal'
+const isFirstArgLockedDown = isNotLiteralArrayOrStatic(val[0])
 if (fnName === 'line')
 return isFirstArgLockedDown ? 'xRelative' : 'yRelative'
 if (fnName === 'lineTo')
@@ -1539,23 +1517,46 @@ export function getConstraintLevelFromSourceRange(
 const firstArg = getFirstArg(sketchFnExp)

 // check if the function is fully constrained
-if (Array.isArray(firstArg.val)) {
-const [a, b] = firstArg.val
-if (a?.type !== 'Literal' && b?.type !== 'Literal') return 'full'
-} else {
-if (firstArg.val?.type !== 'Literal') return 'full'
+if (isNotLiteralArrayOrStatic(firstArg.val)) {
+return 'full'
 }

 // check if the function has no constraints
 const isTwoValFree =
-Array.isArray(firstArg.val) &&
-firstArg.val?.[0]?.type === 'Literal' &&
-firstArg.val?.[1]?.type === 'Literal'
+Array.isArray(firstArg.val) && isLiteralArrayOrStatic(firstArg.val)
 const isOneValFree =
-!Array.isArray(firstArg.val) && firstArg.val?.type === 'Literal'
+!Array.isArray(firstArg.val) && isLiteralArrayOrStatic(firstArg.val)

 if (isTwoValFree) return 'free'
 if (isOneValFree) return 'partial'

 return 'partial'
 }

+export function isLiteralArrayOrStatic(
+val: Value | [Value, Value] | [Value, Value, Value] | undefined
+): boolean {
+if (!val) return false
+
+if (Array.isArray(val)) {
+const [a, b] = val
+return isLiteralArrayOrStatic(a) && isLiteralArrayOrStatic(b)
+}
+return (
+val.type === 'Literal' ||
+(val.type === 'UnaryExpression' && val.argument.type === 'Literal')
+)
+}
+
+export function isNotLiteralArrayOrStatic(
+val: Value | [Value, Value] | [Value, Value, Value]
+): boolean {
+if (Array.isArray(val)) {
+const [a, b] = val
+return isNotLiteralArrayOrStatic(a) && isNotLiteralArrayOrStatic(b)
+}
+return (
+(val.type !== 'Literal' && val.type !== 'UnaryExpression') ||
+(val.type === 'UnaryExpression' && val.argument.type !== 'Literal')
+)
+}
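The behavioural core of the two new helpers above is that a unary minus wrapping a literal (e.g. `-5`) now counts as a static, editable value, and a two-element array is static only when both elements are. Below is a minimal, self-contained Rust sketch of that rule on a toy value type; it is illustrative only, the project's actual code is the TypeScript `isLiteralArrayOrStatic`/`isNotLiteralArrayOrStatic` shown in the diff above.

```rust
// Toy model of the "literal or static" rule (not the project's code).
enum Val {
    Literal(f64),
    Neg(Box<Val>),             // models a UnaryExpression like `-5`
    Identifier(String),        // anything named is not static
    Pair(Box<Val>, Box<Val>),  // models a two-element array argument
}

fn is_literal_or_static(v: &Val) -> bool {
    match v {
        Val::Literal(_) => true,
        Val::Neg(inner) => matches!(**inner, Val::Literal(_)),
        Val::Pair(a, b) => is_literal_or_static(a) && is_literal_or_static(b),
        Val::Identifier(_) => false,
    }
}

fn main() {
    // `[-5, 3]` still counts as free/static, so it can be overwritten.
    let free = Val::Pair(
        Box::new(Val::Neg(Box::new(Val::Literal(5.0)))),
        Box::new(Val::Literal(3.0)),
    );
    // `[width, 3]` references a variable, so it is locked down.
    let locked = Val::Pair(
        Box::new(Val::Identifier("width".into())),
        Box::new(Val::Literal(3.0)),
    );
    assert!(is_literal_or_static(&free));
    assert!(!is_literal_or_static(&locked));
    println!("ok");
}
```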
@@ -131,10 +131,12 @@ const yi=45`
 })
 it('test negative and decimal numbers', () => {
 expect(stringSummaryLexer('-1')).toEqual([
-"number '-1' from 0 to 2",
+"operator '-' from 0 to 1",
+"number '1' from 1 to 2",
 ])
 expect(stringSummaryLexer('-1.5')).toEqual([
-"number '-1.5' from 0 to 4",
+"operator '-' from 0 to 1",
+"number '1.5' from 1 to 4",
 ])
 expect(stringSummaryLexer('1.5')).toEqual([
 "number '1.5' from 0 to 3",
@@ -158,10 +160,12 @@ const yi=45`
 "whitespace ' ' from 3 to 4",
 "operator '+' from 4 to 5",
 "whitespace ' ' from 5 to 6",
-"number '-2.5' from 6 to 10",
+"operator '-' from 6 to 7",
+"number '2.5' from 7 to 10",
 ])
 expect(stringSummaryLexer('-1.5 + 2.5')).toEqual([
-"number '-1.5' from 0 to 4",
+"operator '-' from 0 to 1",
+"number '1.5' from 1 to 4",
 "whitespace ' ' from 4 to 5",
 "operator '+' from 5 to 6",
 "whitespace ' ' from 6 to 7",
|
|||||||
BinaryExpression(Box<BinaryExpression>),
|
BinaryExpression(Box<BinaryExpression>),
|
||||||
CallExpression(Box<CallExpression>),
|
CallExpression(Box<CallExpression>),
|
||||||
UnaryExpression(Box<UnaryExpression>),
|
UnaryExpression(Box<UnaryExpression>),
|
||||||
|
MemberExpression(Box<MemberExpression>),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<BinaryPart> for crate::executor::SourceRange {
|
impl From<BinaryPart> for crate::executor::SourceRange {
|
||||||
@ -471,6 +472,7 @@ impl BinaryPart {
|
|||||||
BinaryPart::BinaryExpression(binary_expression) => binary_expression.recast(options),
|
BinaryPart::BinaryExpression(binary_expression) => binary_expression.recast(options),
|
||||||
BinaryPart::CallExpression(call_expression) => call_expression.recast(options, indentation_level, false),
|
BinaryPart::CallExpression(call_expression) => call_expression.recast(options, indentation_level, false),
|
||||||
BinaryPart::UnaryExpression(unary_expression) => unary_expression.recast(options),
|
BinaryPart::UnaryExpression(unary_expression) => unary_expression.recast(options),
|
||||||
|
BinaryPart::MemberExpression(member_expression) => member_expression.recast(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -481,6 +483,7 @@ impl BinaryPart {
|
|||||||
BinaryPart::BinaryExpression(binary_expression) => binary_expression.start(),
|
BinaryPart::BinaryExpression(binary_expression) => binary_expression.start(),
|
||||||
BinaryPart::CallExpression(call_expression) => call_expression.start(),
|
BinaryPart::CallExpression(call_expression) => call_expression.start(),
|
||||||
BinaryPart::UnaryExpression(unary_expression) => unary_expression.start(),
|
BinaryPart::UnaryExpression(unary_expression) => unary_expression.start(),
|
||||||
|
BinaryPart::MemberExpression(member_expression) => member_expression.start(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -491,6 +494,7 @@ impl BinaryPart {
|
|||||||
BinaryPart::BinaryExpression(binary_expression) => binary_expression.end(),
|
BinaryPart::BinaryExpression(binary_expression) => binary_expression.end(),
|
||||||
BinaryPart::CallExpression(call_expression) => call_expression.end(),
|
BinaryPart::CallExpression(call_expression) => call_expression.end(),
|
||||||
BinaryPart::UnaryExpression(unary_expression) => unary_expression.end(),
|
BinaryPart::UnaryExpression(unary_expression) => unary_expression.end(),
|
||||||
|
BinaryPart::MemberExpression(member_expression) => member_expression.end(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -523,6 +527,7 @@ impl BinaryPart {
|
|||||||
source_ranges: vec![unary_expression.into()],
|
source_ranges: vec![unary_expression.into()],
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
BinaryPart::MemberExpression(member_expression) => member_expression.get_result(memory),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -536,6 +541,9 @@ impl BinaryPart {
|
|||||||
}
|
}
|
||||||
BinaryPart::CallExpression(call_expression) => call_expression.get_hover_value_for_position(pos, code),
|
BinaryPart::CallExpression(call_expression) => call_expression.get_hover_value_for_position(pos, code),
|
||||||
BinaryPart::UnaryExpression(unary_expression) => unary_expression.get_hover_value_for_position(pos, code),
|
BinaryPart::UnaryExpression(unary_expression) => unary_expression.get_hover_value_for_position(pos, code),
|
||||||
|
BinaryPart::MemberExpression(member_expression) => {
|
||||||
|
member_expression.get_hover_value_for_position(pos, code)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -553,6 +561,9 @@ impl BinaryPart {
|
|||||||
BinaryPart::UnaryExpression(ref mut unary_expression) => {
|
BinaryPart::UnaryExpression(ref mut unary_expression) => {
|
||||||
unary_expression.rename_identifiers(old_name, new_name)
|
unary_expression.rename_identifiers(old_name, new_name)
|
||||||
}
|
}
|
||||||
|
BinaryPart::MemberExpression(ref mut member_expression) => {
|
||||||
|
member_expression.rename_identifiers(old_name, new_name)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -751,12 +762,7 @@ impl CallExpression {
|
|||||||
})
|
})
|
||||||
})?
|
})?
|
||||||
.clone(),
|
.clone(),
|
||||||
Value::MemberExpression(member_expression) => {
|
Value::MemberExpression(member_expression) => member_expression.get_result(memory)?,
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: format!("MemberExpression not implemented here: {:?}", member_expression),
|
|
||||||
source_ranges: vec![member_expression.into()],
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
Value::FunctionExpression(function_expression) => {
|
Value::FunctionExpression(function_expression) => {
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
return Err(KclError::Semantic(KclErrorDetails {
|
||||||
message: format!("FunctionExpression not implemented here: {:?}", function_expression),
|
message: format!("FunctionExpression not implemented here: {:?}", function_expression),
|
||||||
@ -1227,12 +1233,7 @@ impl ArrayExpression {
|
|||||||
source_ranges: vec![pipe_substitution.into()],
|
source_ranges: vec![pipe_substitution.into()],
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
Value::MemberExpression(member_expression) => {
|
Value::MemberExpression(member_expression) => member_expression.get_result(memory)?,
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
|
||||||
message: format!("MemberExpression not implemented here: {:?}", member_expression),
|
|
||||||
source_ranges: vec![member_expression.into()],
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
Value::FunctionExpression(function_expression) => {
|
Value::FunctionExpression(function_expression) => {
|
||||||
return Err(KclError::Semantic(KclErrorDetails {
|
return Err(KclError::Semantic(KclErrorDetails {
|
||||||
message: format!("FunctionExpression not implemented here: {:?}", function_expression),
|
message: format!("FunctionExpression not implemented here: {:?}", function_expression),
|
||||||
@ -2542,4 +2543,15 @@ show(firstExtrude)
|
|||||||
"#
|
"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
|
async fn test_recast_math_start_negative() {
|
||||||
|
let some_program_string = r#"const myVar = -5 + 6"#;
|
||||||
|
let tokens = crate::tokeniser::lexer(some_program_string);
|
||||||
|
let parser = crate::parser::Parser::new(tokens);
|
||||||
|
let program = parser.ast().unwrap();
|
||||||
|
|
||||||
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
|
assert_eq!(recasted.trim(), some_program_string);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@@ -1007,4 +1007,160 @@ show(fnBox)"#;

 parse_execute(ast).await.unwrap();
 }
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_get_member_of_object_with_function_period() {
+let ast = r#"const box = (obj) => {
+let myBox = startSketchAt(obj.start)
+|> line([0, obj.l], %)
+|> line([obj.w, 0], %)
+|> line([0, -obj.l], %)
+|> close(%)
+|> extrude(obj.h, %)
+
+return myBox
+}
+
+const thisBox = box({start: [0,0], l: 6, w: 10, h: 3})
+
+show(thisBox)
+"#;
+parse_execute(ast).await.unwrap();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_get_member_of_object_with_function_brace() {
+let ast = r#"const box = (obj) => {
+let myBox = startSketchAt(obj["start"])
+|> line([0, obj["l"]], %)
+|> line([obj["w"], 0], %)
+|> line([0, -obj["l"]], %)
+|> close(%)
+|> extrude(obj["h"], %)
+
+return myBox
+}
+
+const thisBox = box({start: [0,0], l: 6, w: 10, h: 3})
+
+show(thisBox)
+"#;
+parse_execute(ast).await.unwrap();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_get_member_of_object_with_function_mix_period_brace() {
+let ast = r#"const box = (obj) => {
+let myBox = startSketchAt(obj["start"])
+|> line([0, obj["l"]], %)
+|> line([obj["w"], 0], %)
+|> line([10 - obj["w"], -obj.l], %)
+|> close(%)
+|> extrude(obj["h"], %)
+
+return myBox
+}
+
+const thisBox = box({start: [0,0], l: 6, w: 10, h: 3})
+
+show(thisBox)
+"#;
+parse_execute(ast).await.unwrap();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+#[ignore] // ignore til we get loops
+async fn test_execute_with_function_sketch_loop_objects() {
+let ast = r#"const box = (obj) => {
+let myBox = startSketchAt(obj.start)
+|> line([0, obj.l], %)
+|> line([obj.w, 0], %)
+|> line([0, -obj.l], %)
+|> close(%)
+|> extrude(obj.h, %)
+
+return myBox
+}
+
+for var in [{start: [0,0], l: 6, w: 10, h: 3}, {start: [-10,-10], l: 3, w: 5, h: 1.5}] {
+const thisBox = box(var)
+show(thisBox)
+}"#;
+
+parse_execute(ast).await.unwrap();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+#[ignore] // ignore til we get loops
+async fn test_execute_with_function_sketch_loop_array() {
+let ast = r#"const box = (h, l, w, start) => {
+const myBox = startSketchAt([0,0])
+|> line([0, l], %)
+|> line([w, 0], %)
+|> line([0, -l], %)
+|> close(%)
+|> extrude(h, %)
+
+return myBox
+}
+
+
+for var in [[3, 6, 10, [0,0]], [1.5, 3, 5, [-10,-10]]] {
+const thisBox = box(var[0], var[1], var[2], var[3])
+show(thisBox)
+}"#;
+
+parse_execute(ast).await.unwrap();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+#[ignore] // ignore til we get working.
+async fn test_get_member_of_array_with_function() {
+let ast = r#"const box = (array) => {
+let myBox = startSketchAt(array[0])
+|> line([0, array[1], %)
+|> line([array[2], 0], %)
+|> line([0, -array[1]], %)
+|> close(%)
+|> extrude(array[3], %)
+
+return myBox
+}
+
+const thisBox = box([[0,0], 6, 10, 3])
+
+show(thisBox)
+"#;
+parse_execute(ast).await.unwrap();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_math_execute_with_functions() {
+let ast = r#"const myVar = 2 + min(100, -1 + legLen(5, 3))"#;
+let memory = parse_execute(ast).await.unwrap();
+assert_eq!(
+serde_json::json!(5.0),
+memory.root.get("myVar").unwrap().get_json_value().unwrap()
+);
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_math_execute() {
+let ast = r#"const myVar = 1 + 2 * (3 - 4) / -5 + 6"#;
+let memory = parse_execute(ast).await.unwrap();
+assert_eq!(
+serde_json::json!(7.4),
+memory.root.get("myVar").unwrap().get_json_value().unwrap()
+);
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_math_execute_start_negative() {
+let ast = r#"const myVar = -5 + 6"#;
+let memory = parse_execute(ast).await.unwrap();
+assert_eq!(
+serde_json::json!(1.0),
+memory.root.get("myVar").unwrap().get_json_value().unwrap()
+);
+}
 }
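For reference, the expected values in the new math tests follow from ordinary operator precedence: `1 + 2 * (3 - 4) / -5 + 6` is `1 + 0.4 + 6 = 7.4` and `-5 + 6` is `1`; the `5.0` case additionally assumes `legLen(5, 3)` evaluates to `sqrt(5^2 - 3^2) = 4`, which is my reading of that stdlib call rather than something the diff states. A quick standalone check of the plain arithmetic:

```rust
// Standalone arithmetic check of the expected values asserted above.
// Assumes legLen(5, 3) = sqrt(5^2 - 3^2) = 4 (hypotenuse/leg reading).
fn main() {
    let seven_four: f64 = 1.0 + 2.0 * (3.0 - 4.0) / -5.0 + 6.0;
    let one: f64 = -5.0 + 6.0;
    let leg_len = (5.0f64 * 5.0 - 3.0 * 3.0).sqrt(); // 4.0
    let five: f64 = 2.0 + f64::min(100.0, -1.0 + leg_len);
    assert!((seven_four - 7.4).abs() < 1e-9);
    assert_eq!(one, 1.0);
    assert_eq!(five, 5.0);
    println!("{} {} {}", seven_four, one, five);
}
```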
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};

 use crate::{
 abstract_syntax_tree_types::{
-BinaryExpression, BinaryOperator, BinaryPart, CallExpression, Identifier, Literal, ValueMeta,
+BinaryExpression, BinaryOperator, BinaryPart, CallExpression, Identifier, Literal, MemberExpression, ValueMeta,
 },
 errors::{KclError, KclErrorDetails},
 executor::SourceRange,
@@ -81,6 +81,7 @@ pub enum MathExpression {
 BinaryExpression(Box<BinaryExpression>),
 ExtendedBinaryExpression(Box<ExtendedBinaryExpression>),
 ParenthesisToken(Box<ParenthesisToken>),
+MemberExpression(Box<MemberExpression>),
 }

 impl MathExpression {
@@ -92,6 +93,7 @@ impl MathExpression {
 MathExpression::BinaryExpression(binary_expression) => binary_expression.start(),
 MathExpression::ExtendedBinaryExpression(extended_binary_expression) => extended_binary_expression.start(),
 MathExpression::ParenthesisToken(parenthesis_token) => parenthesis_token.start(),
+MathExpression::MemberExpression(member_expression) => member_expression.start(),
 }
 }

@@ -103,6 +105,7 @@ impl MathExpression {
 MathExpression::BinaryExpression(binary_expression) => binary_expression.end(),
 MathExpression::ExtendedBinaryExpression(extended_binary_expression) => extended_binary_expression.end(),
 MathExpression::ParenthesisToken(parenthesis_token) => parenthesis_token.end(),
+MathExpression::MemberExpression(member_expression) => member_expression.end(),
 }
 }
 }
@@ -133,7 +136,7 @@ impl ReversePolishNotation {
 }

 let current_token = self.parser.get_token(0)?;
-if current_token.token_type == TokenType::Word || current_token.token_type == TokenType::Keyword {
+if current_token.token_type == TokenType::Word {
 if let Ok(next) = self.parser.get_token(1) {
 if next.token_type == TokenType::Brace && next.value == "(" {
 let closing_brace = self.parser.find_closing_brace(1, 0, "")?;
@@ -149,6 +152,24 @@ impl ReversePolishNotation {
 );
 return rpn.parse();
 }
+if (current_token.token_type == TokenType::Word)
+&& (next.token_type == TokenType::Period
+|| (next.token_type == TokenType::Brace && next.value == "["))
+{
+// Find the end of the binary expression, ie the member expression.
+let end = self.parser.make_member_expression(0)?.last_index;
+let rpn = ReversePolishNotation::new(
+&self.parser.tokens[end + 1..],
+&self
+.previous_postfix
+.iter()
+.cloned()
+.chain(self.parser.tokens[0..end + 1].iter().cloned())
+.collect::<Vec<Token>>(),
+&self.operators,
+);
+return rpn.parse();
+}
 }

 let rpn = ReversePolishNotation::new(
@@ -164,7 +185,6 @@ impl ReversePolishNotation {
 return rpn.parse();
 } else if current_token.token_type == TokenType::Number
 || current_token.token_type == TokenType::Word
-|| current_token.token_type == TokenType::Keyword
 || current_token.token_type == TokenType::String
 {
 let rpn = ReversePolishNotation::new(
@@ -180,6 +200,35 @@ impl ReversePolishNotation {
 return rpn.parse();
 } else if let Ok(binop) = BinaryOperator::from_str(current_token.value.as_str()) {
 if !self.operators.is_empty() {
+if binop == BinaryOperator::Sub {
+// We need to check if we have a "sub" and if the previous token is a word or
+// number or string, then we need to treat it as a negative number.
+// This oddity only applies to the "-" operator.
+if let Some(prevtoken) = self.previous_postfix.last() {
+if prevtoken.token_type == TokenType::Operator {
+// Get the next token and see if it is a number.
+if let Ok(nexttoken) = self.parser.get_token(1) {
+if nexttoken.token_type == TokenType::Number {
+// We have a negative number/ word or string.
+// Change the value of the token to be the negative number/ word or string.
+let mut new_token = nexttoken.clone();
+new_token.value = format!("-{}", nexttoken.value);
+let rpn = ReversePolishNotation::new(
+&self.parser.tokens[2..],
+&self
+.previous_postfix
+.iter()
+.cloned()
+.chain(vec![new_token.clone()])
+.collect::<Vec<Token>>(),
+&self.operators,
+);
+return rpn.parse();
+}
+}
+}
+}
+}
 if let Ok(prevbinop) = BinaryOperator::from_str(self.operators[self.operators.len() - 1].value.as_str())
 {
 if prevbinop.precedence() >= binop.precedence() {
@@ -196,6 +245,29 @@ impl ReversePolishNotation {
 return rpn.parse();
 }
 }
+} else if self.previous_postfix.is_empty()
+&& current_token.token_type == TokenType::Operator
+&& current_token.value == "-"
+{
+if let Ok(nexttoken) = self.parser.get_token(1) {
+if nexttoken.token_type == TokenType::Number {
+// We have a negative number/ word or string.
+// Change the value of the token to be the negative number/ word or string.
+let mut new_token = nexttoken.clone();
+new_token.value = format!("-{}", nexttoken.value);
+let rpn = ReversePolishNotation::new(
+&self.parser.tokens[2..],
+&self
+.previous_postfix
+.iter()
+.cloned()
+.chain(vec![new_token.clone()])
+.collect::<Vec<Token>>(),
+&self.operators,
+);
+return rpn.parse();
+}
+}
 }

 let rpn = ReversePolishNotation::new(
@@ -299,7 +371,7 @@ impl ReversePolishNotation {
 return Err(KclError::InvalidExpression(KclErrorDetails {
 source_ranges: vec![SourceRange([a.start(), a.end()])],
 message: format!("{:?}", a),
-}))
+}));
 }
 };
 }
@@ -338,7 +410,7 @@ impl ReversePolishNotation {
 start_extended: None,
 })));
 return self.build_tree(&reverse_polish_notation_tokens[1..], new_stack);
-} else if current_token.token_type == TokenType::Word || current_token.token_type == TokenType::Keyword {
+} else if current_token.token_type == TokenType::Word {
 if reverse_polish_notation_tokens.len() > 1 {
 if reverse_polish_notation_tokens[1].token_type == TokenType::Brace
 && reverse_polish_notation_tokens[1].value == "("
@@ -350,6 +422,18 @@ impl ReversePolishNotation {
 )));
 return self.build_tree(&reverse_polish_notation_tokens[closing_brace + 1..], new_stack);
 }
+if reverse_polish_notation_tokens[1].token_type == TokenType::Period
+|| (reverse_polish_notation_tokens[1].token_type == TokenType::Brace
+&& reverse_polish_notation_tokens[1].value == "[")
+{
+let mut new_stack = stack;
+let member_expression = self.parser.make_member_expression(0)?;
+new_stack.push(MathExpression::MemberExpression(Box::new(member_expression.expression)));
+return self.build_tree(
+&reverse_polish_notation_tokens[member_expression.last_index + 1..],
+new_stack,
+);
+}
 let mut new_stack = stack;
 new_stack.push(MathExpression::Identifier(Box::new(Identifier {
 name: current_token.value.clone(),
@@ -396,7 +480,7 @@ impl ReversePolishNotation {
 return Err(KclError::InvalidExpression(KclErrorDetails {
 source_ranges: vec![current_token.into()],
 message: format!("{:?}", a),
-}))
+}));
 }
 };
 let paran = match &stack[stack.len() - 2] {
@@ -445,7 +529,7 @@ impl ReversePolishNotation {
 return Err(KclError::InvalidExpression(KclErrorDetails {
 source_ranges: vec![current_token.into()],
 message: format!("{:?}", a),
-}))
+}));
 }
 };
 let mut new_stack = stack[0..stack.len() - 2].to_vec();
@@ -483,6 +567,10 @@ impl ReversePolishNotation {
 MathExpression::Identifier(ident) => (BinaryPart::Identifier(ident.clone()), ident.start),
 MathExpression::CallExpression(call) => (BinaryPart::CallExpression(call.clone()), call.start),
 MathExpression::BinaryExpression(bin_exp) => (BinaryPart::BinaryExpression(bin_exp.clone()), bin_exp.start),
+MathExpression::MemberExpression(member_expression) => (
+BinaryPart::MemberExpression(member_expression.clone()),
+member_expression.start,
+),
 a => {
 return Err(KclError::InvalidExpression(KclErrorDetails {
 source_ranges: vec![current_token.into()],
@@ -513,6 +601,10 @@ impl ReversePolishNotation {
 MathExpression::Identifier(ident) => (BinaryPart::Identifier(ident.clone()), ident.end),
 MathExpression::CallExpression(call) => (BinaryPart::CallExpression(call.clone()), call.end),
 MathExpression::BinaryExpression(bin_exp) => (BinaryPart::BinaryExpression(bin_exp.clone()), bin_exp.end),
+MathExpression::MemberExpression(member_expression) => (
+BinaryPart::MemberExpression(member_expression.clone()),
+member_expression.end,
+),
 a => {
 return Err(KclError::InvalidExpression(KclErrorDetails {
 source_ranges: vec![current_token.into()],
@@ -521,13 +613,7 @@ impl ReversePolishNotation {
 }
 };

-let right_end = match right.0.clone() {
-BinaryPart::BinaryExpression(_bin_exp) => right.1,
-BinaryPart::Literal(lit) => lit.end,
-BinaryPart::Identifier(ident) => ident.end,
-BinaryPart::CallExpression(call) => call.end,
-BinaryPart::UnaryExpression(unary_exp) => unary_exp.end,
-};
+let right_end = right.0.clone().end();

 let tree = BinaryExpression {
 operator: BinaryOperator::from_str(&current_token.value.clone()).map_err(|err| {
@@ -562,25 +648,13 @@ impl MathParser {
 pub fn parse(&mut self) -> Result<BinaryExpression, KclError> {
 let rpn = self.rpn.parse()?;
 let tree_with_maybe_bad_top_level_start_end = self.rpn.build_tree(&rpn, vec![])?;
-let left_start = match tree_with_maybe_bad_top_level_start_end.clone().left {
-BinaryPart::BinaryExpression(bin_exp) => bin_exp.start,
-BinaryPart::Literal(lit) => lit.start,
-BinaryPart::Identifier(ident) => ident.start,
-BinaryPart::CallExpression(call) => call.start,
-BinaryPart::UnaryExpression(unary_exp) => unary_exp.start,
-};
+let left_start = tree_with_maybe_bad_top_level_start_end.clone().left.start();
 let min_start = if left_start < tree_with_maybe_bad_top_level_start_end.start {
 left_start
 } else {
 tree_with_maybe_bad_top_level_start_end.start
 };
-let right_end = match tree_with_maybe_bad_top_level_start_end.clone().right {
-BinaryPart::BinaryExpression(bin_exp) => bin_exp.end,
-BinaryPart::Literal(lit) => lit.end,
-BinaryPart::Identifier(ident) => ident.end,
-BinaryPart::CallExpression(call) => call.end,
-BinaryPart::UnaryExpression(unary_exp) => unary_exp.end,
-};
+let right_end = tree_with_maybe_bad_top_level_start_end.clone().right.end();
 let max_end = if right_end > tree_with_maybe_bad_top_level_start_end.end {
 right_end
 } else {
@@ -629,6 +703,60 @@ mod test {
 );
 }

+#[test]
+fn test_parse_expression_add_no_spaces() {
+let tokens = crate::tokeniser::lexer("1+2");
+let mut parser = MathParser::new(&tokens);
+let result = parser.parse().unwrap();
+assert_eq!(
+result,
+BinaryExpression {
+operator: BinaryOperator::Add,
+start: 0,
+end: 3,
+left: BinaryPart::Literal(Box::new(Literal {
+value: serde_json::Value::Number(serde_json::Number::from(1)),
+raw: "1".to_string(),
+start: 0,
+end: 1,
+})),
+right: BinaryPart::Literal(Box::new(Literal {
+value: serde_json::Value::Number(serde_json::Number::from(2)),
+raw: "2".to_string(),
+start: 2,
+end: 3,
+})),
+}
+);
+}
+
+#[test]
+fn test_parse_expression_sub_no_spaces() {
+let tokens = crate::tokeniser::lexer("1 -2");
+let mut parser = MathParser::new(&tokens);
+let result = parser.parse().unwrap();
+assert_eq!(
+result,
+BinaryExpression {
+operator: BinaryOperator::Sub,
+start: 0,
+end: 4,
+left: BinaryPart::Literal(Box::new(Literal {
+value: serde_json::Value::Number(serde_json::Number::from(1)),
+raw: "1".to_string(),
+start: 0,
+end: 1,
+})),
+right: BinaryPart::Literal(Box::new(Literal {
+value: serde_json::Value::Number(serde_json::Number::from(2)),
+raw: "2".to_string(),
+start: 3,
+end: 4,
+})),
+}
+);
+}
+
 #[test]
 fn test_parse_expression_plus_followed_by_star() {
 let tokens = crate::tokeniser::lexer("1 + 2 * 3");
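Two things change in the math parser above: a word followed by `.` or `[` is now consumed as a whole member expression and pushed as a single operand, and a `-` that starts the expression or follows another operator is folded into the next number token. Below is a toy, self-contained Rust sketch of the first idea, grouping a member-access chain into one operand; it assumes single-token keys and is not the crate's `make_member_expression`.

```rust
// Toy operand grouping: `10 - obj["w"]` should have two operands, not four tokens.
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Num(String),
    Word(String),
    Op(String),
    Str(String),
    Period,
    OpenBracket,
    CloseBracket,
}

/// Starting at a word, return the index one past the member-access chain.
/// Assumes a `.key` step is two tokens and a `[ key ]` step is three.
fn member_chain_end(tokens: &[Tok], start: usize) -> usize {
    let mut i = start + 1;
    while i < tokens.len() {
        match tokens[i] {
            Tok::Period => i += 2,      // `.key`
            Tok::OpenBracket => i += 3, // `[ key ]`
            _ => break,
        }
    }
    i
}

fn main() {
    // Tokens for: 10 - obj["w"]
    let tokens = vec![
        Tok::Num("10".into()),
        Tok::Op("-".into()),
        Tok::Word("obj".into()),
        Tok::OpenBracket,
        Tok::Str("w".into()),
        Tok::CloseBracket,
    ];
    // The member access starts at index 2 and spans to the end of the input,
    // so the whole `obj["w"]` run is one right-hand operand.
    assert_eq!(member_chain_end(&tokens, 2), tokens.len());
    println!("ok");
}
```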
@ -550,22 +550,41 @@ impl Parser {
|
|||||||
&self,
|
&self,
|
||||||
index: usize,
|
index: usize,
|
||||||
_previous_keys: Option<Vec<ObjectKeyInfo>>,
|
_previous_keys: Option<Vec<ObjectKeyInfo>>,
|
||||||
|
has_opening_brace: bool,
|
||||||
) -> Result<Vec<ObjectKeyInfo>, KclError> {
|
) -> Result<Vec<ObjectKeyInfo>, KclError> {
|
||||||
let previous_keys = _previous_keys.unwrap_or(vec![]);
|
let previous_keys = _previous_keys.unwrap_or(vec![]);
|
||||||
let next_token = self.next_meaningful_token(index, None)?;
|
let next_token = self.next_meaningful_token(index, None)?;
|
||||||
let _next_token = next_token.clone();
|
if next_token.index == self.tokens.len() - 1 {
|
||||||
if _next_token.index == self.tokens.len() - 1 {
|
|
||||||
return Ok(previous_keys);
|
return Ok(previous_keys);
|
||||||
}
|
}
|
||||||
let period_or_opening_bracket = match next_token.token {
|
let mut has_opening_brace = match &next_token.token {
|
||||||
Some(next_token_val) => {
|
Some(next_token_val) => {
|
||||||
if next_token_val.token_type == TokenType::Brace && next_token_val.value == "]" {
|
if next_token_val.token_type == TokenType::Brace && next_token_val.value == "[" {
|
||||||
self.next_meaningful_token(next_token.index, None)?
|
true
|
||||||
} else {
|
} else {
|
||||||
_next_token
|
has_opening_brace
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
None => _next_token,
|
None => has_opening_brace,
|
||||||
|
};
|
||||||
|
let period_or_opening_bracket = match &next_token.token {
|
||||||
|
Some(next_token_val) => {
|
||||||
|
if has_opening_brace && next_token_val.token_type == TokenType::Brace && next_token_val.value == "]" {
|
||||||
|
// We need to reset our has_opening_brace flag, since we've closed it.
|
||||||
|
has_opening_brace = false;
|
||||||
|
let next_next_token = self.next_meaningful_token(next_token.index, None)?;
|
||||||
|
if let Some(next_next_token_val) = &next_next_token.token {
|
||||||
|
if next_next_token_val.token_type == TokenType::Brace && next_next_token_val.value == "[" {
|
||||||
|
// Set the opening brace flag again, since we've opened it again.
|
||||||
|
has_opening_brace = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
next_next_token.clone()
|
||||||
|
} else {
|
||||||
|
next_token.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => next_token.clone(),
|
||||||
};
|
};
|
||||||
if let Some(period_or_opening_bracket_token) = period_or_opening_bracket.token {
|
if let Some(period_or_opening_bracket_token) = period_or_opening_bracket.token {
|
||||||
if period_or_opening_bracket_token.token_type != TokenType::Period
|
if period_or_opening_bracket_token.token_type != TokenType::Period
|
||||||
@ -573,11 +592,26 @@ impl Parser {
|
|||||||
{
|
{
|
||||||
return Ok(previous_keys);
|
return Ok(previous_keys);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// We don't care if we never opened the brace.
|
||||||
|
if !has_opening_brace && period_or_opening_bracket_token.token_type == TokenType::Brace {
|
||||||
|
return Ok(previous_keys);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure its the right kind of brace, we don't care about ().
|
||||||
|
if period_or_opening_bracket_token.token_type == TokenType::Brace
|
||||||
|
&& period_or_opening_bracket_token.value != "["
|
||||||
|
&& period_or_opening_bracket_token.value != "]"
|
||||||
|
{
|
||||||
|
return Ok(previous_keys);
|
||||||
|
}
|
||||||
|
|
||||||
let key_token = self.next_meaningful_token(period_or_opening_bracket.index, None)?;
|
let key_token = self.next_meaningful_token(period_or_opening_bracket.index, None)?;
|
||||||
let next_period_or_opening_bracket = self.next_meaningful_token(key_token.index, None)?;
|
let next_period_or_opening_bracket = self.next_meaningful_token(key_token.index, None)?;
|
||||||
let is_braced = match next_period_or_opening_bracket.token {
|
let is_braced = match next_period_or_opening_bracket.token {
|
||||||
Some(next_period_or_opening_bracket_val) => {
|
Some(next_period_or_opening_bracket_val) => {
|
||||||
next_period_or_opening_bracket_val.token_type == TokenType::Brace
|
has_opening_brace
|
||||||
|
&& next_period_or_opening_bracket_val.token_type == TokenType::Brace
|
||||||
&& next_period_or_opening_bracket_val.value == "]"
|
&& next_period_or_opening_bracket_val.value == "]"
|
||||||
}
|
}
|
||||||
None => false,
|
None => false,
|
||||||
@ -604,7 +638,7 @@ impl Parser {
|
|||||||
index: end_index,
|
index: end_index,
|
||||||
computed,
|
computed,
|
||||||
});
|
});
|
||||||
-self.collect_object_keys(key_token.index, Some(new_previous_keys))
+self.collect_object_keys(key_token.index, Some(new_previous_keys), has_opening_brace)
 } else {
 Err(KclError::Unimplemented(KclErrorDetails {
 source_ranges: vec![period_or_opening_bracket_token.clone().into()],
@@ -616,9 +650,9 @@ impl Parser {
 }
 }

-fn make_member_expression(&self, index: usize) -> Result<MemberExpressionReturn, KclError> {
+pub fn make_member_expression(&self, index: usize) -> Result<MemberExpressionReturn, KclError> {
 let current_token = self.get_token(index)?;
-let mut keys_info = self.collect_object_keys(index, None)?;
+let mut keys_info = self.collect_object_keys(index, None, false)?;
 if keys_info.is_empty() {
 return Err(KclError::Syntax(KclErrorDetails {
 source_ranges: vec![current_token.into()],
@@ -653,6 +687,7 @@ impl Parser {

 fn find_end_of_binary_expression(&self, index: usize) -> Result<usize, KclError> {
 let current_token = self.get_token(index)?;

 if current_token.token_type == TokenType::Brace && current_token.value == "(" {
 let closing_parenthesis = self.find_closing_brace(index, 0, "")?;
 let maybe_another_operator = self.next_meaningful_token(closing_parenthesis, None)?;
@@ -669,10 +704,17 @@ impl Parser {
 Ok(closing_parenthesis)
 };
 }
-if (current_token.token_type == TokenType::Keyword || current_token.token_type == TokenType::Word)
-&& self.get_token(index + 1)?.token_type == TokenType::Brace
-&& self.get_token(index + 1)?.value == "("
+if current_token.token_type == TokenType::Word {
+if let Ok(next_token) = self.get_token(index + 1) {
+if next_token.token_type == TokenType::Period
+|| (next_token.token_type == TokenType::Brace && next_token.value == "[")
 {
+let member_expression = self.make_member_expression(index)?;
+return self.find_end_of_binary_expression(member_expression.last_index);
+}
+
+if next_token.token_type == TokenType::Brace && next_token.value == "(" {
 let closing_parenthesis = self.find_closing_brace(index + 1, 0, "")?;
 let maybe_another_operator = self.next_meaningful_token(closing_parenthesis, None)?;
 return if let Some(maybe_another_operator_token) = maybe_another_operator.token {
@@ -688,9 +730,16 @@ impl Parser {
 Ok(closing_parenthesis)
 };
 }
+}
+}
+
 let maybe_operator = self.next_meaningful_token(index, None)?;
 if let Some(maybe_operator_token) = maybe_operator.token {
-if maybe_operator_token.token_type != TokenType::Operator || maybe_operator_token.value == PIPE_OPERATOR {
+if maybe_operator_token.token_type == TokenType::Number {
+return self.find_end_of_binary_expression(maybe_operator.index);
+} else if maybe_operator_token.token_type != TokenType::Operator
+|| maybe_operator_token.value == PIPE_OPERATOR
+{
 return Ok(index);
 }
 let next_right = self.next_meaningful_token(maybe_operator.index, None)?;
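Note (not part of the commit): with the lookahead above, a `.` or `[` after an identifier no longer ends the binary-expression scan; scanning continues from the last token of the member expression. A minimal test-style sketch of the case with the member access on the left and the number second, written in the same style as the commit's new tests (the test name and KCL snippet are illustrative, not taken from the change set):

```
#[test]
fn test_parse_member_expression_binary_expression_period_number_second() {
    // Assumes the in-module helpers used by the existing tests:
    // crate::tokeniser::lexer and Parser::new.
    let tokens = crate::tokeniser::lexer(
        r#"const obj = { a: 1, b: 2 }
const height = obj.a - 2"#,
    );
    let parser = Parser::new(tokens);
    parser.ast().unwrap();
}
```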
@@ -731,7 +780,6 @@ impl Parser {
 }
 }
 if current_token.token_type == TokenType::Word
-|| current_token.token_type == TokenType::Keyword
 || current_token.token_type == TokenType::Number
 || current_token.token_type == TokenType::String
 {
@@ -745,6 +793,29 @@ impl Parser {
 }
 }
 }
+
+// Account for negative numbers.
+if current_token.token_type == TokenType::Operator || current_token.value == "-" {
+if let Some(next_token) = &next.token {
+if next_token.token_type == TokenType::Word
+|| next_token.token_type == TokenType::Number
+|| next_token.token_type == TokenType::String
+{
+// See if the next token is an operator.
+let next_right = self.next_meaningful_token(next.index, None)?;
+if let Some(next_right_token) = next_right.token {
+if next_right_token.token_type == TokenType::Operator {
+let binary_expression = self.make_binary_expression(index)?;
+return Ok(ValueReturn {
+value: Value::BinaryExpression(Box::new(binary_expression.expression)),
+last_index: binary_expression.last_index,
+});
+}
+}
+}
+}
+}
+
 if current_token.token_type == TokenType::Brace && current_token.value == "{" {
 let object_expression = self.make_object_expression(index)?;
 return Ok(ValueReturn {
@@ -761,11 +832,25 @@ impl Parser {
 }

 if let Some(next_token) = next.token {
-if (current_token.token_type == TokenType::Keyword || current_token.token_type == TokenType::Word)
+if (current_token.token_type == TokenType::Word)
 && (next_token.token_type == TokenType::Period
 || (next_token.token_type == TokenType::Brace && next_token.value == "["))
 {
 let member_expression = self.make_member_expression(index)?;
+// If the next token is an operator, we need to make a binary expression.
+let next_right = self.next_meaningful_token(member_expression.last_index, None)?;
+if let Some(next_right_token) = next_right.token {
+if next_right_token.token_type == TokenType::Operator
+|| next_right_token.token_type == TokenType::Number
+{
+let binary_expression = self.make_binary_expression(index)?;
+return Ok(ValueReturn {
+value: Value::BinaryExpression(Box::new(binary_expression.expression)),
+last_index: binary_expression.last_index,
+});
+}
+}
+
 return Ok(ValueReturn {
 value: Value::MemberExpression(Box::new(member_expression.expression)),
 last_index: member_expression.last_index,
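Note (illustrative, not from the commit): because make_value now turns a member expression followed by an operator into a BinaryExpression, and BinaryPart gains a MemberExpression arm further down in this diff, a member access should be usable on either side of the operator. A hedged sketch in the style of the new tests:

```
#[test]
fn test_parse_member_expression_on_both_sides_of_operator() {
    // Hypothetical test name and snippet; exercises obj.a and obj.b as the
    // two operands of a single binary expression.
    let tokens = crate::tokeniser::lexer(
        r#"const obj = { a: 1, b: 2 }
const sum = obj.a + obj.b"#,
    );
    let parser = Parser::new(tokens);
    parser.ast().unwrap();
}
```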
@@ -820,7 +905,7 @@ impl Parser {

 Err(KclError::Unexpected(KclErrorDetails {
 source_ranges: vec![current_token.into()],
-message: format!("{:?}", current_token.token_type),
+message: format!("Unexpected token {:?}", current_token),
 }))
 }

@@ -949,6 +1034,7 @@ impl Parser {
 });
 }
 let argument_token = self.next_meaningful_token(index, None)?;

 if let Some(argument_token_token) = argument_token.token {
 let next_brace_or_comma = self.next_meaningful_token(argument_token.index, None)?;
 if let Some(next_brace_or_comma_token) = next_brace_or_comma.token {
@@ -962,12 +1048,25 @@ impl Parser {
 _previous_args.push(Value::ArrayExpression(Box::new(array_expression.expression)));
 return self.make_arguments(next_comma_or_brace_token_index, _previous_args);
 }
-if argument_token_token.token_type == TokenType::Operator && argument_token_token.value == "-" {
-let unary_expression = self.make_unary_expression(argument_token.index)?;
-let next_comma_or_brace_token_index =
-self.next_meaningful_token(unary_expression.last_index, None)?.index;
+if (argument_token_token.token_type == TokenType::Word)
+&& (next_brace_or_comma_token.token_type == TokenType::Period
+|| (next_brace_or_comma_token.token_type == TokenType::Brace
+&& next_brace_or_comma_token.value == "["))
+{
+let member_expression = self.make_member_expression(argument_token.index)?;
 let mut _previous_args = previous_args;
-_previous_args.push(Value::UnaryExpression(Box::new(unary_expression.expression)));
+_previous_args.push(Value::MemberExpression(Box::new(member_expression.expression)));
+let next_comma_or_brace_token_index =
+self.next_meaningful_token(member_expression.last_index, None)?.index;
+return self.make_arguments(next_comma_or_brace_token_index, _previous_args);
+}
+
+if argument_token_token.token_type == TokenType::Operator && argument_token_token.value == "-" {
+let value = self.make_value(argument_token.index)?;
+let next_comma_or_brace_token_index = self.next_meaningful_token(value.last_index, None)?.index;
+let mut _previous_args = previous_args;
+_previous_args.push(value.value);
 return self.make_arguments(next_comma_or_brace_token_index, _previous_args);
 }
 if argument_token_token.token_type == TokenType::Brace && argument_token_token.value == "{" {
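Note (illustrative, not from the commit): make_arguments now recognises a member expression as a call argument, so a call such as `startSketchAt(obj.delta)` should be collected as a single MemberExpression argument. A hedged sketch in the style of the new parser tests (startSketchAt is only borrowed as a familiar function name; the argument shape is hypothetical):

```
#[test]
fn test_parse_member_expression_as_call_argument() {
    let tokens = crate::tokeniser::lexer(
        r#"const obj = { delta: [1, 2] }
const sk = startSketchAt(obj.delta)"#,
    );
    let parser = Parser::new(tokens);
    parser.ast().unwrap();
}
```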
@@ -998,6 +1097,7 @@ impl Parser {
 _previous_args.push(Value::BinaryExpression(Box::new(binary_expression.expression)));
 return self.make_arguments(binary_expression.last_index, _previous_args);
 }

 if argument_token_token.token_type == TokenType::Operator
 && argument_token_token.value == PIPE_SUBSTITUTION_OPERATOR
 {
@@ -1177,7 +1277,7 @@ impl Parser {
 kind: VariableKind::from_str(&current_token.value).map_err(|_| {
 KclError::Syntax(KclErrorDetails {
 source_ranges: vec![current_token.into()],
-message: "Unexpected token".to_string(),
+message: format!("Unexpected token: {}", current_token.value),
 })
 })?,
 declarations: variable_declarators_result.declarations,
@@ -1239,10 +1339,11 @@ impl Parser {
 Value::Literal(literal) => BinaryPart::Literal(literal),
 Value::UnaryExpression(unary_expression) => BinaryPart::UnaryExpression(unary_expression),
 Value::CallExpression(call_expression) => BinaryPart::CallExpression(call_expression),
+Value::MemberExpression(member_expression) => BinaryPart::MemberExpression(member_expression),
 _ => {
 return Err(KclError::Syntax(KclErrorDetails {
 source_ranges: vec![current_token.into()],
-message: "Invalid argument for unary expression".to_string(),
+message: format!("Invalid argument for unary expression: {:?}", argument.value),
 }));
 }
 },
@@ -1513,7 +1614,7 @@ impl Parser {

 Err(KclError::Syntax(KclErrorDetails {
 source_ranges: vec![token.into()],
-message: "unexpected token".to_string(),
+message: format!("unexpected token {}", token.value),
 }))
 }

@@ -1730,7 +1831,7 @@ const key = 'c'"#,
 fn test_collect_object_keys() {
 let tokens = crate::tokeniser::lexer("const prop = yo.one[\"two\"]");
 let parser = Parser::new(tokens);
-let keys_info = parser.collect_object_keys(6, None).unwrap();
+let keys_info = parser.collect_object_keys(6, None, false).unwrap();
 assert_eq!(keys_info.len(), 2);
 let first_key = match keys_info[0].key.clone() {
 LiteralIdentifier::Identifier(identifier) => format!("identifier-{}", identifier.name),
@@ -2759,6 +2860,73 @@ show(mySk1)"#;
 assert!(result.err().unwrap().to_string().contains("Unexpected token"));
 }

+#[test]
+fn test_parse_member_expression_double_nested_braces() {
+let tokens = crate::tokeniser::lexer(r#"const prop = yo["one"][two]"#);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
+#[test]
+fn test_parse_member_expression_binary_expression_period_number_first() {
+let tokens = crate::tokeniser::lexer(
+r#"const obj = { a: 1, b: 2 }
+const height = 1 - obj.a"#,
+);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
+#[test]
+fn test_parse_member_expression_binary_expression_brace_number_first() {
+let tokens = crate::tokeniser::lexer(
+r#"const obj = { a: 1, b: 2 }
+const height = 1 - obj["a"]"#,
+);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
+#[test]
+fn test_parse_member_expression_binary_expression_brace_number_second() {
+let tokens = crate::tokeniser::lexer(
+r#"const obj = { a: 1, b: 2 }
+const height = obj["a"] - 1"#,
+);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
+#[test]
+fn test_parse_member_expression_binary_expression_in_array_number_first() {
+let tokens = crate::tokeniser::lexer(
+r#"const obj = { a: 1, b: 2 }
+const height = [1 - obj["a"], 0]"#,
+);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
+#[test]
+fn test_parse_member_expression_binary_expression_in_array_number_second() {
+let tokens = crate::tokeniser::lexer(
+r#"const obj = { a: 1, b: 2 }
+const height = [obj["a"] - 1, 0]"#,
+);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
+#[test]
+fn test_parse_member_expression_binary_expression_in_array_number_second_missing_space() {
+let tokens = crate::tokeniser::lexer(
+r#"const obj = { a: 1, b: 2 }
+const height = [obj["a"] -1, 0]"#,
+);
+let parser = Parser::new(tokens);
+parser.ast().unwrap();
+}
+
 #[test]
 fn test_parse_half_pipe() {
 let tokens = crate::tokeniser::lexer(
@@ -161,34 +161,12 @@ pub enum LineData {
 /// A point with a tag.
 PointWithTag {
 /// The to point.
-to: PointOrDefault,
+to: [f64; 2],
 /// The tag.
 tag: String,
 },
 /// A point.
 Point([f64; 2]),
-/// A string like `default`.
-Default(String),
-}
-
-/// A point or a default value.
-#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
-#[ts(export)]
-#[serde(rename_all = "camelCase", untagged)]
-pub enum PointOrDefault {
-/// A point.
-Point([f64; 2]),
-/// A string like `default`.
-Default(String),
-}
-
-impl PointOrDefault {
-fn get_point_with_default(&self, default: [f64; 2]) -> [f64; 2] {
-match self {
-PointOrDefault::Point(point) => *point,
-PointOrDefault::Default(_) => default,
-}
-}
 }

 /// Draw a line.
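Note (illustrative, not from the commit): with PointOrDefault gone, `to` is always a two-element point and the old `"default"` string form of LineData no longer deserializes. A hedged sketch mirroring the shapes in the updated test_deserialize_line_data later in this diff (the test name is hypothetical):

```
#[test]
fn test_line_data_requires_a_point() {
    use crate::std::sketch::LineData;

    let with_tag: LineData =
        serde_json::from_str(r#"{"to": [0.0, 1.0], "tag": "thing"}"#).unwrap();
    assert_eq!(
        with_tag,
        LineData::PointWithTag {
            to: [0.0, 1.0],
            tag: "thing".to_string()
        }
    );

    // The Default(String) variant is removed, so a bare string is rejected.
    assert!(serde_json::from_str::<LineData>(r#""default""#).is_err());
}
```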
@@ -205,12 +183,9 @@ pub fn line(args: &mut Args) -> Result<MemoryItem, KclError> {
 }]
 fn inner_line(data: LineData, sketch_group: SketchGroup, args: &mut Args) -> Result<SketchGroup, KclError> {
 let from = sketch_group.get_coords_from_paths()?;

-let default = [0.2, 1.0];
 let inner_args = match &data {
-LineData::PointWithTag { to, .. } => to.get_point_with_default(default),
+LineData::PointWithTag { to, .. } => *to,
 LineData::Point(to) => *to,
-LineData::Default(_) => default,
 };

 let to = [from.x + inner_args[0], from.y + inner_args[1]];
@@ -283,10 +258,7 @@ pub fn x_line(args: &mut Args) -> Result<MemoryItem, KclError> {
 }]
 fn inner_x_line(data: AxisLineData, sketch_group: SketchGroup, args: &mut Args) -> Result<SketchGroup, KclError> {
 let line_data = match data {
-AxisLineData::LengthWithTag { length, tag } => LineData::PointWithTag {
-to: PointOrDefault::Point([length, 0.0]),
-tag,
-},
+AxisLineData::LengthWithTag { length, tag } => LineData::PointWithTag { to: [length, 0.0], tag },
 AxisLineData::Length(length) => LineData::Point([length, 0.0]),
 };

@@ -308,10 +280,7 @@ pub fn y_line(args: &mut Args) -> Result<MemoryItem, KclError> {
 }]
 fn inner_y_line(data: AxisLineData, sketch_group: SketchGroup, args: &mut Args) -> Result<SketchGroup, KclError> {
 let line_data = match data {
-AxisLineData::LengthWithTag { length, tag } => LineData::PointWithTag {
-to: PointOrDefault::Point([0.0, length]),
-tag,
-},
+AxisLineData::LengthWithTag { length, tag } => LineData::PointWithTag { to: [0.0, length], tag },
 AxisLineData::Length(length) => LineData::Point([0.0, length]),
 };

@@ -427,10 +396,7 @@ fn inner_angled_line_of_x_length(

 let new_sketch_group = inner_line(
 if let AngledLineData::AngleWithTag { tag, .. } = data {
-LineData::PointWithTag {
-to: PointOrDefault::Point(to),
-tag,
-}
+LineData::PointWithTag { to, tag }
 } else {
 LineData::Point(to)
 },
@@ -525,10 +491,7 @@ fn inner_angled_line_of_y_length(

 let new_sketch_group = inner_line(
 if let AngledLineData::AngleWithTag { tag, .. } = data {
-LineData::PointWithTag {
-to: PointOrDefault::Point(to),
-tag,
-}
+LineData::PointWithTag { to, tag }
 } else {
 LineData::Point(to)
 },
@@ -654,11 +617,9 @@ pub fn start_sketch_at(args: &mut Args) -> Result<MemoryItem, KclError> {
 name = "startSketchAt",
 }]
 fn inner_start_sketch_at(data: LineData, args: &mut Args) -> Result<SketchGroup, KclError> {
-let default = [0.0, 0.0];
 let to = match &data {
-LineData::PointWithTag { to, .. } => to.get_point_with_default(default),
+LineData::PointWithTag { to, .. } => *to,
 LineData::Point(to) => *to,
-LineData::Default(_) => default,
 };

 let id = uuid::Uuid::new_v4();
@@ -992,16 +953,12 @@ mod tests {

 use pretty_assertions::assert_eq;

-use crate::std::sketch::{LineData, PointOrDefault};
+use crate::std::sketch::LineData;

 #[test]
 fn test_deserialize_line_data() {
-let mut str_json = "\"default\"".to_string();
-let data: LineData = serde_json::from_str(&str_json).unwrap();
-assert_eq!(data, LineData::Default("default".to_string()));
-
 let data = LineData::Point([0.0, 1.0]);
-str_json = serde_json::to_string(&data).unwrap();
+let mut str_json = serde_json::to_string(&data).unwrap();
 assert_eq!(str_json, "[0.0,1.0]");

 str_json = "[0, 1]".to_string();
@@ -1013,7 +970,7 @@ mod tests {
 assert_eq!(
 data,
 LineData::PointWithTag {
-to: PointOrDefault::Point([0.0, 1.0]),
+to: [0.0, 1.0],
 tag: "thing".to_string()
 }
 );
@@ -296,13 +296,6 @@ fn return_token_at_index(s: &str, start_index: usize) -> Option<Token> {
 start_index,
 ));
 }
-if is_number(str_from_index) {
-return Some(make_token(
-TokenType::Number,
-&match_first(str_from_index, &NUMBER)?,
-start_index,
-));
-}
 if is_operator(str_from_index) {
 return Some(make_token(
 TokenType::Operator,
@@ -310,6 +303,13 @@ fn return_token_at_index(s: &str, start_index: usize) -> Option<Token> {
 start_index,
 ));
 }
+if is_number(str_from_index) {
+return Some(make_token(
+TokenType::Number,
+&match_first(str_from_index, &NUMBER)?,
+start_index,
+));
+}
 if is_keyword(str_from_index) {
 return Some(make_token(
 TokenType::Keyword,
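Note (illustrative, not from the commit text): moving the number check after the operator check appears to be what lets an input like `obj["a"] -1` lex the `-` as its own operator token instead of folding it into a negative literal. A condensed, hedged version of the commit's own missing-space parser test:

```
#[test]
fn test_minus_without_space_still_parses() {
    let tokens = crate::tokeniser::lexer(
        r#"const obj = { a: 1, b: 2 }
const height = [obj["a"] -1, 0]"#,
    );
    let parser = Parser::new(tokens);
    parser.ast().unwrap();
}
```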