Fix KCL source ranges to know which source file they point to (#4418)

* Add ts_rs feature to work with indexmap

* Add feature for schemars to work with indexmap

* Add module ID to intern module paths

* Update code to use new source range with three fields

* Update generated files

* Update docs

* Fix wasm

* Fix TS code to use new SourceRange

* Fix TS tests to use new SourceRange and moduleId

* Fix formatting

* Fix to filter errors and source ranges to only show the top-level module

* Fix to reuse module IDs

* Fix to disallow empty path for import

* Revert unneeded Self change

* Rename field to be clearer

* Fix parser tests

* Update snapshots

* Change to not serialize module_id of 0

* Update snapshots after adding default module_id

* Move module_id functions to separate module

* Fix tests for console errors

* Proposal: module ID = 0 gets skipped when serializing tokens too (#4422)

Just like in AST nodes.

Also, I think "is_top_level" communicates intention better than "is_default".

---------

Co-authored-by: Adam Chalmers <adam.chalmers@zoo.dev>
This commit is contained in:
Jonathan Tran
2024-11-07 11:23:41 -05:00
committed by 49lf
parent 0128c67aae
commit 91049204c5
80 changed files with 1789 additions and 1246 deletions

File diff suppressed because it is too large Load Diff

View File

@ -18,7 +18,7 @@ export const isErrorWhitelisted = (exception: Error) => {
{
name: '"{"kind"',
message:
'"engine","sourceRanges":[[0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}"',
'"engine","sourceRanges":[[0,0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}"',
stack: '',
foundInSpec: 'e2e/playwright/testing-settings.spec.ts',
project: 'Google Chrome',
@ -156,8 +156,8 @@ export const isErrorWhitelisted = (exception: Error) => {
{
name: 'Unhandled Promise Rejection',
message:
'{"kind":"engine","sourceRanges":[[0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}',
stack: `Unhandled Promise Rejection: {"kind":"engine","sourceRanges":[[0,0]],"msg":"Failed to get string from response from engine: \`JsValue(undefined)\`"}
'{"kind":"engine","sourceRanges":[[0,0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}',
stack: `Unhandled Promise Rejection: {"kind":"engine","sourceRanges":[[0,0,0]],"msg":"Failed to get string from response from engine: \`JsValue(undefined)\`"}
at unknown (http://localhost:3000/src/lang/std/engineConnection.ts:1245:26)`,
foundInSpec:
'e2e/playwright/onboarding-tests.spec.ts Click through each onboarding step',
@ -253,7 +253,7 @@ export const isErrorWhitelisted = (exception: Error) => {
{
name: '{"kind"',
stack: ``,
message: `engine","sourceRanges":[[0,0]],"msg":"Failed to wait for promise from engine: JsValue(\\"Force interrupt, executionIsStale, new AST requested\\")"}`,
message: `engine","sourceRanges":[[0,0,0]],"msg":"Failed to wait for promise from engine: JsValue(\\"Force interrupt, executionIsStale, new AST requested\\")"}`,
project: 'Google Chrome',
foundInSpec: 'e2e/playwright/testing-settings.spec.ts',
},

View File

@ -43,14 +43,14 @@ describe('processMemory', () => {
tag: null,
id: expect.any(String),
faceId: expect.any(String),
sourceRange: [170, 194],
sourceRange: [170, 194, 0],
},
{
type: 'extrudePlane',
tag: null,
id: expect.any(String),
faceId: expect.any(String),
sourceRange: [202, 230],
sourceRange: [202, 230, 0],
},
],
theSketch: [

View File

@ -38,6 +38,7 @@ export class KclManager {
body: [],
start: 0,
end: 0,
moduleId: 0,
nonCodeMeta: {
nonCodeNodes: {},
startNodes: [],
@ -204,6 +205,7 @@ export class KclManager {
body: [],
start: 0,
end: 0,
moduleId: 0,
nonCodeMeta: {
nonCodeNodes: {},
startNodes: [],

View File

@ -1903,6 +1903,6 @@ describe('parsing errors', () => {
const error = result as KCLError
expect(error.kind).toBe('syntax')
expect(error.msg).toBe('Unexpected token: (')
expect(error.sourceRanges).toEqual([[27, 28]])
expect(error.sourceRanges).toEqual([[27, 28, 0]])
})
})

View File

@ -19,7 +19,7 @@ const mySketch001 = startSketchOn('XY')
const sketch001 = execState.memory.get('mySketch001')
expect(sketch001).toEqual({
type: 'UserVal',
__meta: [{ sourceRange: [46, 71] }],
__meta: [{ sourceRange: [46, 71, 0] }],
value: {
type: 'Sketch',
on: expect.any(Object),
@ -29,7 +29,7 @@ const mySketch001 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [46, 71],
sourceRange: [46, 71, 0],
},
},
paths: [
@ -39,7 +39,7 @@ const mySketch001 = startSketchOn('XY')
to: [-1.59, -1.54],
from: [0, 0],
__geoMeta: {
sourceRange: [77, 102],
sourceRange: [77, 102, 0],
id: expect.any(String),
},
},
@ -49,13 +49,13 @@ const mySketch001 = startSketchOn('XY')
from: [-1.59, -1.54],
tag: null,
__geoMeta: {
sourceRange: [108, 132],
sourceRange: [108, 132, 0],
id: expect.any(String),
},
},
],
id: expect.any(String),
__meta: [{ sourceRange: [46, 71] }],
__meta: [{ sourceRange: [46, 71, 0] }],
},
})
})
@ -80,14 +80,14 @@ const mySketch001 = startSketchOn('XY')
faceId: expect.any(String),
tag: null,
id: expect.any(String),
sourceRange: [77, 102],
sourceRange: [77, 102, 0],
},
{
type: 'extrudePlane',
faceId: expect.any(String),
tag: null,
id: expect.any(String),
sourceRange: [108, 132],
sourceRange: [108, 132, 0],
},
],
sketch: {
@ -104,7 +104,7 @@ const mySketch001 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [77, 102],
sourceRange: [77, 102, 0],
},
},
{
@ -114,7 +114,7 @@ const mySketch001 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [108, 132],
sourceRange: [108, 132, 0],
},
},
],
@ -122,7 +122,7 @@ const mySketch001 = startSketchOn('XY')
height: 2,
startCapId: expect.any(String),
endCapId: expect.any(String),
__meta: [{ sourceRange: [46, 71] }],
__meta: [{ sourceRange: [46, 71, 0] }],
})
})
test('sketch extrude and sketch on one of the faces', async () => {
@ -162,7 +162,7 @@ const sk2 = startSketchOn('XY')
faceId: expect.any(String),
tag: null,
id: expect.any(String),
sourceRange: [69, 89],
sourceRange: [69, 89, 0],
},
{
type: 'extrudePlane',
@ -174,14 +174,14 @@ const sk2 = startSketchOn('XY')
value: 'p',
},
id: expect.any(String),
sourceRange: [95, 117],
sourceRange: [95, 117, 0],
},
{
type: 'extrudePlane',
faceId: expect.any(String),
tag: null,
id: expect.any(String),
sourceRange: [123, 142],
sourceRange: [123, 142, 0],
},
],
sketch: {
@ -194,7 +194,7 @@ const sk2 = startSketchOn('XY')
p: {
__meta: [
{
sourceRange: [114, 116],
sourceRange: [114, 116, 0],
},
],
type: 'TagIdentifier',
@ -210,7 +210,7 @@ const sk2 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [69, 89],
sourceRange: [69, 89, 0],
},
},
{
@ -225,7 +225,7 @@ const sk2 = startSketchOn('XY')
},
__geoMeta: {
id: expect.any(String),
sourceRange: [95, 117],
sourceRange: [95, 117, 0],
},
},
{
@ -235,7 +235,7 @@ const sk2 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [123, 142],
sourceRange: [123, 142, 0],
},
},
],
@ -243,7 +243,7 @@ const sk2 = startSketchOn('XY')
height: 2,
startCapId: expect.any(String),
endCapId: expect.any(String),
__meta: [{ sourceRange: [38, 63] }],
__meta: [{ sourceRange: [38, 63, 0] }],
},
{
type: 'Solid',
@ -254,7 +254,7 @@ const sk2 = startSketchOn('XY')
faceId: expect.any(String),
tag: null,
id: expect.any(String),
sourceRange: [373, 393],
sourceRange: [373, 393, 0],
},
{
type: 'extrudePlane',
@ -266,14 +266,14 @@ const sk2 = startSketchOn('XY')
value: 'o',
},
id: expect.any(String),
sourceRange: [399, 420],
sourceRange: [399, 420, 0],
},
{
type: 'extrudePlane',
faceId: expect.any(String),
tag: null,
id: expect.any(String),
sourceRange: [426, 445],
sourceRange: [426, 445, 0],
},
],
sketch: {
@ -286,7 +286,7 @@ const sk2 = startSketchOn('XY')
o: {
__meta: [
{
sourceRange: [417, 419],
sourceRange: [417, 419, 0],
},
],
type: 'TagIdentifier',
@ -302,7 +302,7 @@ const sk2 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [373, 393],
sourceRange: [373, 393, 0],
},
},
{
@ -317,7 +317,7 @@ const sk2 = startSketchOn('XY')
},
__geoMeta: {
id: expect.any(String),
sourceRange: [399, 420],
sourceRange: [399, 420, 0],
},
},
{
@ -327,7 +327,7 @@ const sk2 = startSketchOn('XY')
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [426, 445],
sourceRange: [426, 445, 0],
},
},
],
@ -335,7 +335,7 @@ const sk2 = startSketchOn('XY')
height: 2,
startCapId: expect.any(String),
endCapId: expect.any(String),
__meta: [{ sourceRange: [342, 367] }],
__meta: [{ sourceRange: [342, 367, 0] }],
},
])
})

View File

@ -9,8 +9,8 @@ describe('test kclErrToDiagnostic', () => {
kind: 'semantic',
msg: 'Semantic error',
sourceRanges: [
[0, 1],
[2, 3],
[0, 1, 0],
[2, 3, 0],
],
},
{
@ -19,8 +19,8 @@ describe('test kclErrToDiagnostic', () => {
kind: 'type',
msg: 'Type error',
sourceRanges: [
[4, 5],
[6, 7],
[4, 5, 0],
[6, 7, 0],
],
},
]

View File

@ -4,15 +4,17 @@ import { posToOffset } from '@kittycad/codemirror-lsp-client'
import { Diagnostic as LspDiagnostic } from 'vscode-languageserver-protocol'
import { Text } from '@codemirror/state'
const TOP_LEVEL_MODULE_ID = 0
type ExtractKind<T> = T extends { kind: infer K } ? K : never
export class KCLError extends Error {
kind: ExtractKind<RustKclError> | 'name'
sourceRanges: [number, number][]
sourceRanges: [number, number, number][]
msg: string
constructor(
kind: ExtractKind<RustKclError> | 'name',
msg: string,
sourceRanges: [number, number][]
sourceRanges: [number, number, number][]
) {
super()
this.kind = kind
@ -23,63 +25,63 @@ export class KCLError extends Error {
}
export class KCLLexicalError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('lexical', msg, sourceRanges)
Object.setPrototypeOf(this, KCLSyntaxError.prototype)
}
}
export class KCLInternalError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('internal', msg, sourceRanges)
Object.setPrototypeOf(this, KCLSyntaxError.prototype)
}
}
export class KCLSyntaxError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('syntax', msg, sourceRanges)
Object.setPrototypeOf(this, KCLSyntaxError.prototype)
}
}
export class KCLSemanticError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('semantic', msg, sourceRanges)
Object.setPrototypeOf(this, KCLSemanticError.prototype)
}
}
export class KCLTypeError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('type', msg, sourceRanges)
Object.setPrototypeOf(this, KCLTypeError.prototype)
}
}
export class KCLUnimplementedError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('unimplemented', msg, sourceRanges)
Object.setPrototypeOf(this, KCLUnimplementedError.prototype)
}
}
export class KCLUnexpectedError extends KCLError {
constructor(msg: string, sourceRanges: [number, number][]) {
constructor(msg: string, sourceRanges: [number, number, number][]) {
super('unexpected', msg, sourceRanges)
Object.setPrototypeOf(this, KCLUnexpectedError.prototype)
}
}
export class KCLValueAlreadyDefined extends KCLError {
constructor(key: string, sourceRanges: [number, number][]) {
constructor(key: string, sourceRanges: [number, number, number][]) {
super('name', `Key ${key} was already defined elsewhere`, sourceRanges)
Object.setPrototypeOf(this, KCLValueAlreadyDefined.prototype)
}
}
export class KCLUndefinedValueError extends KCLError {
constructor(key: string, sourceRanges: [number, number][]) {
constructor(key: string, sourceRanges: [number, number, number][]) {
super('name', `Key ${key} has not been defined`, sourceRanges)
Object.setPrototypeOf(this, KCLUndefinedValueError.prototype)
}
@ -97,13 +99,22 @@ export function lspDiagnosticsToKclErrors(
.flatMap(
({ range, message }) =>
new KCLError('unexpected', message, [
[posToOffset(doc, range.start)!, posToOffset(doc, range.end)!],
[
posToOffset(doc, range.start)!,
posToOffset(doc, range.end)!,
TOP_LEVEL_MODULE_ID,
],
])
)
.filter(({ sourceRanges }) => {
const [from, to] = sourceRanges[0]
const [from, to, moduleId] = sourceRanges[0]
return (
from !== null && to !== null && from !== undefined && to !== undefined
from !== null &&
to !== null &&
from !== undefined &&
to !== undefined &&
// Filter out errors that are not from the top-level module.
moduleId === TOP_LEVEL_MODULE_ID
)
})
.sort((a, b) => {
@ -127,8 +138,16 @@ export function kclErrorsToDiagnostics(
errors: KCLError[]
): CodeMirrorDiagnostic[] {
return errors?.flatMap((err) => {
return err.sourceRanges.map(([from, to]) => {
return { from, to, message: err.msg, severity: 'error' }
})
const sourceRanges: CodeMirrorDiagnostic[] = err.sourceRanges
// Filter out errors that are not from the top-level module.
.filter(([_start, _end, moduleId]) => moduleId === TOP_LEVEL_MODULE_ID)
.map(([from, to]) => {
return { from, to, message: err.msg, severity: 'error' }
})
// Make sure we didn't filter out all the source ranges.
if (sourceRanges.length === 0) {
sourceRanges.push({ from: 0, to: 0, message: err.msg, severity: 'error' })
}
return sourceRanges
})
}

View File

@ -65,7 +65,7 @@ const newVar = myVar + 1`
to: [0, 2],
from: [0, 0],
__geoMeta: {
sourceRange: [72, 97],
sourceRange: [72, 97, 0],
id: expect.any(String),
},
tag: {
@ -81,7 +81,7 @@ const newVar = myVar + 1`
from: [0, 2],
tag: null,
__geoMeta: {
sourceRange: [103, 119],
sourceRange: [103, 119, 0],
id: expect.any(String),
},
},
@ -90,7 +90,7 @@ const newVar = myVar + 1`
to: [5, -1],
from: [2, 3],
__geoMeta: {
sourceRange: [125, 154],
sourceRange: [125, 154, 0],
id: expect.any(String),
},
tag: {
@ -160,14 +160,14 @@ const newVar = myVar + 1`
tag: null,
__geoMeta: {
id: expect.any(String),
sourceRange: [39, 63],
sourceRange: [39, 63, 0],
},
},
tags: {
myPath: {
__meta: [
{
sourceRange: [109, 116],
sourceRange: [109, 116, 0],
},
],
type: 'TagIdentifier',
@ -182,7 +182,7 @@ const newVar = myVar + 1`
from: [0, 0],
tag: null,
__geoMeta: {
sourceRange: [69, 85],
sourceRange: [69, 85, 0],
id: expect.any(String),
},
},
@ -191,7 +191,7 @@ const newVar = myVar + 1`
to: [0, 1],
from: [1, 1],
__geoMeta: {
sourceRange: [91, 117],
sourceRange: [91, 117, 0],
id: expect.any(String),
},
tag: {
@ -207,15 +207,15 @@ const newVar = myVar + 1`
from: [0, 1],
tag: null,
__geoMeta: {
sourceRange: [123, 139],
sourceRange: [123, 139, 0],
id: expect.any(String),
},
},
],
id: expect.any(String),
__meta: [{ sourceRange: [39, 63] }],
__meta: [{ sourceRange: [39, 63, 0] }],
},
__meta: [{ sourceRange: [39, 63] }],
__meta: [{ sourceRange: [39, 63, 0] }],
})
})
it('execute array expression', async () => {
@ -229,7 +229,7 @@ const newVar = myVar + 1`
value: 3,
__meta: [
{
sourceRange: [14, 15],
sourceRange: [14, 15, 0],
},
],
})
@ -238,7 +238,7 @@ const newVar = myVar + 1`
value: [1, '2', 3, 9],
__meta: [
{
sourceRange: [27, 49],
sourceRange: [27, 49, 0],
},
],
})
@ -257,7 +257,7 @@ const newVar = myVar + 1`
value: { aStr: 'str', anum: 2, identifier: 3, binExp: 9 },
__meta: [
{
sourceRange: [27, 83],
sourceRange: [27, 83, 0],
},
],
})
@ -272,7 +272,7 @@ const newVar = myVar + 1`
value: '123',
__meta: [
{
sourceRange: [41, 50],
sourceRange: [41, 50, 0],
},
],
})
@ -426,7 +426,7 @@ const theExtrude = startSketchOn('XY')
new KCLError(
'undefined_value',
'memory item key `myVarZ` is not defined',
[[129, 135]]
[[129, 135, 0]]
)
)
})

View File

@ -101,15 +101,15 @@ describe('Testing findUniqueName', () => {
it('should find a unique name', () => {
const result = findUniqueName(
JSON.stringify([
{ type: 'Identifier', name: 'yo01', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo02', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo03', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo04', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo05', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo06', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo07', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo08', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo09', start: 0, end: 0 },
{ type: 'Identifier', name: 'yo01', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo02', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo03', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo04', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo05', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo06', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo07', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo08', start: 0, end: 0, moduleId: 0 },
{ type: 'Identifier', name: 'yo09', start: 0, end: 0, moduleId: 0 },
] satisfies Node<Identifier>[]),
'yo',
2
@ -124,6 +124,7 @@ describe('Testing addSketchTo', () => {
body: [],
start: 0,
end: 0,
moduleId: 0,
nonCodeMeta: { nonCodeNodes: {}, startNodes: [] },
},
'yz'

View File

@ -242,6 +242,7 @@ export function mutateObjExpProp(
value: updateWith,
start: 0,
end: 0,
moduleId: 0,
})
}
}
@ -577,6 +578,7 @@ export function createLiteral(value: string | number): Node<Literal> {
type: 'Literal',
start: 0,
end: 0,
moduleId: 0,
value,
raw: `${value}`,
}
@ -587,6 +589,7 @@ export function createTagDeclarator(value: string): Node<TagDeclarator> {
type: 'TagDeclarator',
start: 0,
end: 0,
moduleId: 0,
value,
}
@ -597,6 +600,7 @@ export function createIdentifier(name: string): Node<Identifier> {
type: 'Identifier',
start: 0,
end: 0,
moduleId: 0,
name,
}
@ -607,6 +611,7 @@ export function createPipeSubstitution(): Node<PipeSubstitution> {
type: 'PipeSubstitution',
start: 0,
end: 0,
moduleId: 0,
}
}
@ -618,10 +623,12 @@ export function createCallExpressionStdLib(
type: 'CallExpression',
start: 0,
end: 0,
moduleId: 0,
callee: {
type: 'Identifier',
start: 0,
end: 0,
moduleId: 0,
name,
},
@ -638,10 +645,12 @@ export function createCallExpression(
type: 'CallExpression',
start: 0,
end: 0,
moduleId: 0,
callee: {
type: 'Identifier',
start: 0,
end: 0,
moduleId: 0,
name,
},
@ -657,6 +666,7 @@ export function createArrayExpression(
type: 'ArrayExpression',
start: 0,
end: 0,
moduleId: 0,
nonCodeMeta: nonCodeMetaEmpty(),
elements,
@ -670,6 +680,7 @@ export function createPipeExpression(
type: 'PipeExpression',
start: 0,
end: 0,
moduleId: 0,
body,
nonCodeMeta: nonCodeMetaEmpty(),
@ -686,12 +697,14 @@ export function createVariableDeclaration(
type: 'VariableDeclaration',
start: 0,
end: 0,
moduleId: 0,
declarations: [
{
type: 'VariableDeclarator',
start: 0,
end: 0,
moduleId: 0,
id: createIdentifier(varName),
init,
@ -709,12 +722,14 @@ export function createObjectExpression(properties: {
type: 'ObjectExpression',
start: 0,
end: 0,
moduleId: 0,
nonCodeMeta: nonCodeMetaEmpty(),
properties: Object.entries(properties).map(([key, value]) => ({
type: 'ObjectProperty',
start: 0,
end: 0,
moduleId: 0,
key: createIdentifier(key),
value,
@ -730,6 +745,7 @@ export function createUnaryExpression(
type: 'UnaryExpression',
start: 0,
end: 0,
moduleId: 0,
operator,
argument,
@ -745,6 +761,7 @@ export function createBinaryExpression([left, operator, right]: [
type: 'BinaryExpression',
start: 0,
end: 0,
moduleId: 0,
operator,
left,

View File

@ -13,6 +13,7 @@ Map {
"range": [
37,
64,
0,
],
},
"pathIds": [
@ -31,6 +32,7 @@ Map {
"range": [
37,
64,
0,
],
},
"planeId": "UUID",
@ -56,6 +58,7 @@ Map {
"range": [
70,
86,
0,
],
},
"edgeIds": [
@ -77,6 +80,7 @@ Map {
"range": [
92,
119,
0,
],
},
"edgeCutId": "UUID",
@ -99,6 +103,7 @@ Map {
"range": [
125,
150,
0,
],
},
"edgeIds": [
@ -120,6 +125,7 @@ Map {
"range": [
156,
203,
0,
],
},
"edgeIds": [
@ -141,6 +147,7 @@ Map {
"range": [
209,
217,
0,
],
},
"edgeIds": [],
@ -162,6 +169,7 @@ Map {
"range": [
231,
254,
0,
],
},
"edgeIds": [
@ -289,6 +297,7 @@ Map {
"range": [
260,
299,
0,
],
},
"consumedEdgeId": "UUID",
@ -307,6 +316,7 @@ Map {
"range": [
350,
377,
0,
],
},
"planeId": "UUID",
@ -331,6 +341,7 @@ Map {
"range": [
383,
398,
0,
],
},
"edgeIds": [
@ -352,6 +363,7 @@ Map {
"range": [
404,
420,
0,
],
},
"edgeIds": [
@ -373,6 +385,7 @@ Map {
"range": [
426,
473,
0,
],
},
"edgeIds": [
@ -394,6 +407,7 @@ Map {
"range": [
479,
487,
0,
],
},
"edgeIds": [],
@ -415,6 +429,7 @@ Map {
"range": [
501,
522,
0,
],
},
"edgeIds": [

View File

@ -610,7 +610,7 @@ describe('testing getArtifactsToUpdate', () => {
sweepId: '',
codeRef: {
pathToNode: [['body', '']],
range: [37, 64],
range: [37, 64, 0],
},
},
])
@ -622,7 +622,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: [],
edgeIds: [],
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},
@ -632,7 +632,7 @@ describe('testing getArtifactsToUpdate', () => {
planeId: expect.any(String),
sweepId: expect.any(String),
codeRef: {
range: [37, 64],
range: [37, 64, 0],
pathToNode: [['body', '']],
},
solid2dId: expect.any(String),
@ -645,7 +645,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceId: '',
edgeIds: [],
codeRef: {
range: [70, 86],
range: [70, 86, 0],
pathToNode: [['body', '']],
},
},
@ -655,7 +655,7 @@ describe('testing getArtifactsToUpdate', () => {
planeId: expect.any(String),
sweepId: expect.any(String),
codeRef: {
range: [37, 64],
range: [37, 64, 0],
pathToNode: [['body', '']],
},
solid2dId: expect.any(String),
@ -669,7 +669,7 @@ describe('testing getArtifactsToUpdate', () => {
edgeIds: [],
surfaceId: '',
codeRef: {
range: [260, 299],
range: [260, 299, 0],
pathToNode: [['body', '']],
},
},
@ -679,7 +679,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceId: expect.any(String),
edgeIds: expect.any(Array),
codeRef: {
range: [92, 119],
range: [92, 119, 0],
pathToNode: [['body', '']],
},
edgeCutId: expect.any(String),
@ -699,7 +699,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceId: expect.any(String),
edgeIds: expect.any(Array),
codeRef: {
range: [156, 203],
range: [156, 203, 0],
pathToNode: [['body', '']],
},
},
@ -710,7 +710,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: expect.any(Array),
edgeIds: expect.any(Array),
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},
@ -727,7 +727,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceId: expect.any(String),
edgeIds: expect.any(Array),
codeRef: {
range: [125, 150],
range: [125, 150, 0],
pathToNode: [['body', '']],
},
},
@ -738,7 +738,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: expect.any(Array),
edgeIds: expect.any(Array),
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},
@ -755,7 +755,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceId: expect.any(String),
edgeIds: expect.any(Array),
codeRef: {
range: [92, 119],
range: [92, 119, 0],
pathToNode: [['body', '']],
},
edgeCutId: expect.any(String),
@ -767,7 +767,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: expect.any(Array),
edgeIds: expect.any(Array),
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},
@ -784,7 +784,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceId: expect.any(String),
edgeIds: expect.any(Array),
codeRef: {
range: [70, 86],
range: [70, 86, 0],
pathToNode: [['body', '']],
},
},
@ -795,7 +795,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: expect.any(Array),
edgeIds: expect.any(Array),
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},
@ -813,7 +813,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: expect.any(Array),
edgeIds: expect.any(Array),
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},
@ -831,7 +831,7 @@ describe('testing getArtifactsToUpdate', () => {
surfaceIds: expect.any(Array),
edgeIds: expect.any(Array),
codeRef: {
range: [231, 254],
range: [231, 254, 0],
pathToNode: [['body', '']],
},
},

View File

@ -1823,11 +1823,13 @@ export const updateStartProfileAtArgs: SketchLineHelper['updateArgs'] = ({
modifiedAst: {
start: 0,
end: 0,
moduleId: 0,
body: [],
nonCodeMeta: {
start: 0,
end: 0,
moduleId: 0,
startNodes: [],
nonCodeNodes: [],
},

View File

@ -120,8 +120,8 @@ const initialise = async () => {
export const initPromise = initialise()
export const rangeTypeFix = (ranges: number[][]): [number, number][] =>
ranges.map(([start, end]) => [start, end])
export const rangeTypeFix = (ranges: number[][]): [number, number, number][] =>
ranges.map(([start, end, moduleId]) => [start, end, moduleId])
export const parse = (code: string | Error): Node<Program> | Error => {
if (err(code)) return code

View File

@ -3083,6 +3083,7 @@ dependencies = [
"chrono",
"dyn-clone",
"indexmap 1.9.3",
"indexmap 2.6.0",
"schemars_derive",
"serde",
"serde_json",
@ -3883,6 +3884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a2f31991cee3dce1ca4f929a8a04fdd11fd8801aac0f2030b0fa8a0a3fef6b9"
dependencies = [
"chrono",
"indexmap 2.6.0",
"lazy_static",
"serde_json",
"thiserror 1.0.68",

View File

@ -173,9 +173,7 @@ fn do_stdlib_inner(
quote! {
let code_blocks = vec![#(#cb),*];
code_blocks.iter().map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
@ -750,9 +748,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
quote! {
#[tokio::test(flavor = "multi_thread")]
async fn #test_name_mock() {
let tokens = crate::token::lexer(#code_block).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(#code_block).unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(crate::engine::conn_mock::EngineConnection::new().await.unwrap())),

View File

@ -2,9 +2,7 @@
mod test_examples_someFn {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_someFn0() {
let tokens = crate::token::lexer("someFn()").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse("someFn()").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +111,7 @@ impl crate::docs::StdLibFn for SomeFn {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,7 @@
mod test_examples_someFn {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_someFn0() {
let tokens = crate::token::lexer("someFn()").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse("someFn()").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +111,7 @@ impl crate::docs::StdLibFn for SomeFn {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,9 @@
mod test_examples_show {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_show0() {
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nshow")
.unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -36,9 +36,8 @@ mod test_examples_show {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_show1() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -150,9 +149,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_show {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_show0() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,10 +2,9 @@
mod test_examples_my_func {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_my_func0() {
let tokens =
crate::token::lexer("This is another code block.\nyes sirrr.\nmyFunc").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nmyFunc")
.unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -37,9 +36,8 @@ mod test_examples_my_func {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_my_func1() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmyFunc").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nmyFunc").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -151,9 +149,7 @@ impl crate::docs::StdLibFn for MyFunc {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,10 +2,9 @@
mod test_examples_line_to {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_line_to0() {
let tokens =
crate::token::lexer("This is another code block.\nyes sirrr.\nlineTo").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nlineTo")
.unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -37,9 +36,8 @@ mod test_examples_line_to {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_line_to1() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nlineTo").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nlineTo").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -159,9 +157,7 @@ impl crate::docs::StdLibFn for LineTo {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_min {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_min0() {
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nmin").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nmin").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -36,9 +35,8 @@ mod test_examples_min {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_min1() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmin").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nmin").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -150,9 +148,7 @@ impl crate::docs::StdLibFn for Min {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_show {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_show0() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_import {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_import0() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nimport").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_import {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_import0() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nimport").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_import {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_import0() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nimport").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Import {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,8 @@
mod test_examples_show {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_show0() {
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program =
crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Show {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -2,9 +2,7 @@
mod test_examples_some_function {
#[tokio::test(flavor = "multi_thread")]
async fn test_mock_example_some_function0() {
let tokens = crate::token::lexer("someFunction()").unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse("someFunction()").unwrap();
let id_generator = crate::executor::IdGenerator::default();
let ctx = crate::executor::ExecutorContext {
engine: std::sync::Arc::new(Box::new(
@ -108,9 +106,7 @@ impl crate::docs::StdLibFn for SomeFunction {
code_blocks
.iter()
.map(|cb| {
let tokens = crate::token::lexer(cb).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(cb).unwrap();
let mut options: crate::ast::types::FormatOptions = Default::default();
options.insert_final_newline = false;
program.recast(&options, 0)

View File

@ -16,8 +16,7 @@ use syn::{parse_macro_input, LitStr};
pub fn parse(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as LitStr);
let kcl_src = input.value();
let tokens = kcl_lib::token::lexer(&kcl_src).unwrap();
let ast = kcl_lib::parser::Parser::new(tokens).ast().unwrap();
let ast = kcl_lib::parser::top_level_parse(&kcl_src).unwrap();
let ast_struct = ast.bake(&Default::default());
quote!(#ast_struct).into()
}

View File

@ -1,6 +1,6 @@
extern crate alloc;
use kcl_lib::ast::types::{
BodyItem, Expr, Identifier, ItemVisibility, Literal, LiteralValue, Node, Program, VariableDeclaration,
BodyItem, Expr, Identifier, ItemVisibility, Literal, LiteralValue, ModuleId, Node, Program, VariableDeclaration,
VariableDeclarator, VariableKind,
};
use kcl_macros::parse;
@ -9,6 +9,7 @@ use pretty_assertions::assert_eq;
#[test]
fn basic() {
let actual = parse!("const y = 4");
let module_id = ModuleId::default();
let expected = Node {
inner: Program {
body: vec![BodyItem::VariableDeclaration(Box::new(Node::new(
@ -22,6 +23,7 @@ fn basic() {
},
6,
7,
module_id,
),
init: Expr::Literal(Box::new(Node::new(
Literal {
@ -31,11 +33,13 @@ fn basic() {
},
10,
11,
module_id,
))),
digest: None,
},
6,
11,
module_id,
)],
visibility: ItemVisibility::Default,
kind: VariableKind::Const,
@ -43,12 +47,14 @@ fn basic() {
},
0,
11,
module_id,
)))],
non_code_meta: Default::default(),
digest: None,
},
start: 0,
end: 11,
module_id,
};
assert_eq!(expected, actual);
}

View File

@ -15,7 +15,7 @@ use hyper::{
service::{make_service_fn, service_fn},
Body, Error, Response, Server,
};
use kcl_lib::{executor::ExecutorContext, settings::types::UnitLength, test_server::RequestBody};
use kcl_lib::{ast::types::ModuleId, executor::ExecutorContext, settings::types::UnitLength, test_server::RequestBody};
use tokio::{
sync::{mpsc, oneshot},
task::JoinHandle,
@ -157,7 +157,8 @@ async fn snapshot_endpoint(body: Bytes, state: ExecutorContext) -> Response<Body
Err(e) => return bad_request(format!("Invalid request JSON: {e}")),
};
let RequestBody { kcl_program, test_name } = body;
let parser = match kcl_lib::token::lexer(&kcl_program) {
let module_id = ModuleId::default();
let parser = match kcl_lib::token::lexer(&kcl_program, module_id) {
Ok(ts) => kcl_lib::parser::Parser::new(ts),
Err(e) => return bad_request(format!("tokenization error: {e}")),
};

View File

@ -7,9 +7,7 @@ mod conn_mock_core;
///Converts the given kcl code to an engine test
pub async fn kcl_to_engine_core(code: &str) -> Result<String> {
let tokens = kcl_lib::token::lexer(code)?;
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
let program = kcl_lib::parser::top_level_parse(code)?;
let result = Arc::new(Mutex::new("".into()));
let ref_result = Arc::clone(&result);

View File

@ -37,14 +37,14 @@ parse-display = "0.9.1"
pyo3 = { version = "0.22.6", optional = true }
reqwest = { version = "0.12", default-features = false, features = ["stream", "rustls-tls"] }
ropey = "1.6.1"
schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1", "preserve_order"] }
schemars = { version = "0.8.17", features = ["impl_json_schema", "indexmap2", "url", "uuid1", "preserve_order"] }
serde = { version = "1.0.214", features = ["derive"] }
serde_json = "1.0.128"
sha2 = "0.10.8"
tabled = { version = "0.15.0", optional = true }
thiserror = "2.0.0"
toml = "0.8.19"
ts-rs = { version = "10.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
ts-rs = { version = "10.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "indexmap-impl", "no-serde-warnings", "serde-json-impl"] }
url = { version = "2.5.3", features = ["serde"] }
urlencoding = "2.1.3"
uuid = { version = "1.11.0", features = ["v4", "js", "serde"] }

View File

@ -1,9 +1,10 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
pub fn bench_lex(c: &mut Criterion) {
c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM)));
c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM)));
c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
let module_id = kcl_lib::ast::types::ModuleId::default();
c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM, module_id)));
c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM, module_id)));
c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM, module_id)));
}
pub fn bench_parse(c: &mut Criterion) {
@ -15,7 +16,8 @@ pub fn bench_parse(c: &mut Criterion) {
("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
("koch snowflake", LSYSTEM_KOCH_SNOWFLAKE_PROGRAM),
] {
let tokens = kcl_lib::token::lexer(file).unwrap();
let module_id = kcl_lib::ast::types::ModuleId::default();
let tokens = kcl_lib::token::lexer(file, module_id).unwrap();
c.bench_function(&format!("parse_{name}"), move |b| {
let tok = tokens.clone();
b.iter(move || {
@ -26,8 +28,8 @@ pub fn bench_parse(c: &mut Criterion) {
}
}
fn lex(program: &str) {
black_box(kcl_lib::token::lexer(program).unwrap());
fn lex(program: &str, module_id: kcl_lib::ast::types::ModuleId) {
black_box(kcl_lib::token::lexer(program, module_id).unwrap());
}
criterion_group!(benches, bench_lex, bench_parse);

View File

@ -1,26 +1,32 @@
use iai::black_box;
pub fn parse(program: &str) {
let tokens = kcl_lib::token::lexer(program).unwrap();
let module_id = kcl_lib::ast::types::ModuleId::default();
let tokens = kcl_lib::token::lexer(program, module_id).unwrap();
let tok = tokens.clone();
let parser = kcl_lib::parser::Parser::new(tok.clone());
black_box(parser.ast().unwrap());
}
fn lex_kitt() {
black_box(kcl_lib::token::lexer(KITT_PROGRAM).unwrap());
let module_id = kcl_lib::ast::types::ModuleId::default();
black_box(kcl_lib::token::lexer(KITT_PROGRAM, module_id).unwrap());
}
fn lex_pipes() {
black_box(kcl_lib::token::lexer(PIPES_PROGRAM).unwrap());
let module_id = kcl_lib::ast::types::ModuleId::default();
black_box(kcl_lib::token::lexer(PIPES_PROGRAM, module_id).unwrap());
}
fn lex_cube() {
black_box(kcl_lib::token::lexer(CUBE_PROGRAM).unwrap());
let module_id = kcl_lib::ast::types::ModuleId::default();
black_box(kcl_lib::token::lexer(CUBE_PROGRAM, module_id).unwrap());
}
fn lex_math() {
black_box(kcl_lib::token::lexer(MATH_PROGRAM).unwrap());
let module_id = kcl_lib::ast::types::ModuleId::default();
black_box(kcl_lib::token::lexer(MATH_PROGRAM, module_id).unwrap());
}
fn lex_lsystem() {
black_box(kcl_lib::token::lexer(LSYSTEM_PROGRAM).unwrap());
let module_id = kcl_lib::ast::types::ModuleId::default();
black_box(kcl_lib::token::lexer(LSYSTEM_PROGRAM, module_id).unwrap());
}
fn parse_kitt() {

View File

@ -9,8 +9,7 @@ pub fn bench_digest(c: &mut Criterion) {
("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
("lsystem", LSYSTEM_PROGRAM),
] {
let tokens = kcl_lib::token::lexer(file).unwrap();
let prog = kcl_lib::parser::Parser::new(tokens).ast().unwrap();
let prog = kcl_lib::parser::top_level_parse(file).unwrap();
c.bench_function(&format!("digest_{name}"), move |b| {
let prog = prog.clone();

View File

@ -16,7 +16,7 @@ use crate::{
executor::{Point2d, SourceRange},
};
use super::types::Node;
use super::types::{ModuleId, Node};
type Point3d = kcmc::shared::Point3d<f64>;
@ -38,6 +38,7 @@ const EPSILON: f64 = 0.015625; // or 2^-6
pub async fn modify_ast_for_sketch(
engine: &Arc<Box<dyn EngineManager>>,
program: &mut Node<Program>,
module_id: ModuleId,
// The name of the sketch.
sketch_name: &str,
// The type of plane the sketch is on. `XY` or `XZ`, etc
@ -183,9 +184,7 @@ pub async fn modify_ast_for_sketch(
let recasted = program.recast(&FormatOptions::default(), 0);
// Re-parse the ast so we get the correct source ranges.
let tokens = crate::token::lexer(&recasted)?;
let parser = crate::parser::Parser::new(tokens);
*program = parser.ast()?;
*program = crate::parser::parse(&recasted, module_id)?;
Ok(recasted)
}

View File

@ -37,6 +37,7 @@ pub(crate) mod digest;
pub(crate) mod execute;
mod literal_value;
mod none;
pub(crate) mod source_range;
use digest::Digest;
@ -48,11 +49,14 @@ pub enum Definition<'a> {
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS, Bake)]
#[databake(path = kcl_lib::ast::types)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct Node<T> {
#[serde(flatten)]
pub inner: T,
pub start: usize,
pub end: usize,
#[serde(default, skip_serializing_if = "ModuleId::is_top_level")]
pub module_id: ModuleId,
}
impl<T: JsonSchema> schemars::JsonSchema for Node<T> {
@ -78,8 +82,13 @@ impl<T: JsonSchema> schemars::JsonSchema for Node<T> {
}
impl<T> Node<T> {
pub fn new(inner: T, start: usize, end: usize) -> Self {
Self { inner, start, end }
pub fn new(inner: T, start: usize, end: usize, module_id: ModuleId) -> Self {
Self {
inner,
start,
end,
module_id,
}
}
pub fn no_src(inner: T) -> Self {
@ -87,15 +96,21 @@ impl<T> Node<T> {
inner,
start: 0,
end: 0,
module_id: ModuleId::default(),
}
}
pub fn boxed(inner: T, start: usize, end: usize) -> BoxNode<T> {
Box::new(Node { inner, start, end })
pub fn boxed(inner: T, start: usize, end: usize, module_id: ModuleId) -> BoxNode<T> {
Box::new(Node {
inner,
start,
end,
module_id,
})
}
pub fn as_source_ranges(&self) -> Vec<SourceRange> {
vec![SourceRange([self.start, self.end])]
vec![SourceRange([self.start, self.end, self.module_id.as_usize()])]
}
}
@ -121,19 +136,19 @@ impl<T: fmt::Display> fmt::Display for Node<T> {
impl<T> From<Node<T>> for crate::executor::SourceRange {
fn from(v: Node<T>) -> Self {
Self([v.start, v.end])
Self([v.start, v.end, v.module_id.as_usize()])
}
}
impl<T> From<&Node<T>> for crate::executor::SourceRange {
fn from(v: &Node<T>) -> Self {
Self([v.start, v.end])
Self([v.start, v.end, v.module_id.as_usize()])
}
}
impl<T> From<&BoxNode<T>> for crate::executor::SourceRange {
fn from(v: &BoxNode<T>) -> Self {
Self([v.start, v.end])
Self([v.start, v.end, v.module_id.as_usize()])
}
}
@ -505,6 +520,29 @@ impl Program {
}
}
/// Identifier of a source file. Uses a u32 to keep the size small.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize, ts_rs::TS, JsonSchema, Bake)]
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
#[databake(path = kcl_lib::ast::types)]
#[ts(export)]
pub struct ModuleId(pub u32);
impl ModuleId {
pub fn from_usize(id: usize) -> Self {
Self(u32::try_from(id).expect("module ID should fit in a u32"))
}
pub fn as_usize(&self) -> usize {
usize::try_from(self.0).expect("module ID should fit in a usize")
}
/// Top-level file is the one being executed.
/// Represented by module ID of 0, i.e. the default value.
pub fn is_top_level(&self) -> bool {
*self == Self::default()
}
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
#[databake(path = kcl_lib::ast::types)]
#[ts(export)]
@ -538,13 +576,13 @@ impl BodyItem {
impl From<BodyItem> for SourceRange {
fn from(item: BodyItem) -> Self {
Self([item.start(), item.end()])
Self([item.start(), item.end(), item.module_id().as_usize()])
}
}
impl From<&BodyItem> for SourceRange {
fn from(item: &BodyItem) -> Self {
Self([item.start(), item.end()])
Self([item.start(), item.end(), item.module_id().as_usize()])
}
}
@ -568,7 +606,7 @@ pub enum Expr {
MemberExpression(BoxNode<MemberExpression>),
UnaryExpression(BoxNode<UnaryExpression>),
IfExpression(BoxNode<IfExpression>),
None(KclNone),
None(Node<KclNone>),
}
impl Expr {
@ -758,13 +796,13 @@ impl Expr {
impl From<Expr> for SourceRange {
fn from(value: Expr) -> Self {
Self([value.start(), value.end()])
Self([value.start(), value.end(), value.module_id().as_usize()])
}
}
impl From<&Expr> for SourceRange {
fn from(value: &Expr) -> Self {
Self([value.start(), value.end()])
Self([value.start(), value.end(), value.module_id().as_usize()])
}
}
@ -784,13 +822,13 @@ pub enum BinaryPart {
impl From<BinaryPart> for SourceRange {
fn from(value: BinaryPart) -> Self {
Self([value.start(), value.end()])
Self([value.start(), value.end(), value.module_id().as_usize()])
}
}
impl From<&BinaryPart> for SourceRange {
fn from(value: &BinaryPart) -> Self {
Self([value.start(), value.end()])
Self([value.start(), value.end(), value.module_id().as_usize()])
}
}
@ -2154,13 +2192,13 @@ impl MemberObject {
impl From<MemberObject> for SourceRange {
fn from(obj: MemberObject) -> Self {
Self([obj.start(), obj.end()])
Self([obj.start(), obj.end(), obj.module_id().as_usize()])
}
}
impl From<&MemberObject> for SourceRange {
fn from(obj: &MemberObject) -> Self {
Self([obj.start(), obj.end()])
Self([obj.start(), obj.end(), obj.module_id().as_usize()])
}
}
@ -2191,13 +2229,13 @@ impl LiteralIdentifier {
impl From<LiteralIdentifier> for SourceRange {
fn from(id: LiteralIdentifier) -> Self {
Self([id.start(), id.end()])
Self([id.start(), id.end(), id.module_id().as_usize()])
}
}
impl From<&LiteralIdentifier> for SourceRange {
fn from(id: &LiteralIdentifier) -> Self {
Self([id.start(), id.end()])
Self([id.start(), id.end(), id.module_id().as_usize()])
}
}
@ -3018,9 +3056,7 @@ fn ghi = (x) => {
ghi("things")
"#;
let tokens = crate::token::lexer(code).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(code).unwrap();
let folding_ranges = program.get_lsp_folding_ranges();
assert_eq!(folding_ranges.len(), 3);
assert_eq!(folding_ranges[0].start_line, 29);
@ -3056,9 +3092,7 @@ fn ghi = (x) => {
return x
}
"#;
let tokens = crate::token::lexer(code).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(code).unwrap();
let symbols = program.get_lsp_symbols(code).unwrap();
assert_eq!(symbols.len(), 7);
}
@ -3078,9 +3112,7 @@ const cylinder = startSketchOn('-XZ')
}, %)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let value = program.get_non_code_meta_for_position(50);
@ -3103,9 +3135,7 @@ const cylinder = startSketchOn('-XZ')
}, %)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let value = program.get_non_code_meta_for_position(124);
@ -3118,9 +3148,7 @@ const cylinder = startSketchOn('-XZ')
|> startProfileAt([0,0], %)
|> xLine(5, %) // lin
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let value = program.get_non_code_meta_for_position(86);
@ -3132,9 +3160,7 @@ const cylinder = startSketchOn('-XZ')
let some_program_string = r#"fn thing = (arg0: number, arg1: string, tag?: string) => {
return arg0
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
// Check the program output for the types of the parameters.
let function = program.body.first().unwrap();
@ -3156,9 +3182,7 @@ const cylinder = startSketchOn('-XZ')
let some_program_string = r#"fn thing = (arg0: number[], arg1: string[], tag?: string) => {
return arg0
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
// Check the program output for the types of the parameters.
let function = program.body.first().unwrap();
@ -3180,9 +3204,8 @@ const cylinder = startSketchOn('-XZ')
let some_program_string = r#"fn thing = (arg0: number[], arg1: {thing: number, things: string[], more?: string}, tag?: string) => {
return arg0
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let module_id = ModuleId::default();
let program = crate::parser::parse(some_program_string, module_id).unwrap();
// Check the program output for the types of the parameters.
let function = program.body.first().unwrap();
@ -3207,6 +3230,7 @@ const cylinder = startSketchOn('-XZ')
},
35,
40,
module_id,
),
type_: Some(FnArgType::Primitive(FnArgPrimitive::Number)),
optional: false,
@ -3220,6 +3244,7 @@ const cylinder = startSketchOn('-XZ')
},
50,
56,
module_id,
),
type_: Some(FnArgType::Array(FnArgPrimitive::String)),
optional: false,
@ -3233,6 +3258,7 @@ const cylinder = startSketchOn('-XZ')
},
68,
72,
module_id,
),
type_: Some(FnArgType::Primitive(FnArgPrimitive::String)),
optional: true,
@ -3249,9 +3275,8 @@ const cylinder = startSketchOn('-XZ')
let some_program_string = r#"fn thing = () => {thing: number, things: string[], more?: string} {
return 1
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let module_id = ModuleId::default();
let program = crate::parser::parse(some_program_string, module_id).unwrap();
// Check the program output for the types of the parameters.
let function = program.body.first().unwrap();
@ -3275,6 +3300,7 @@ const cylinder = startSketchOn('-XZ')
},
18,
23,
module_id,
),
type_: Some(FnArgType::Primitive(FnArgPrimitive::Number)),
optional: false,
@ -3288,6 +3314,7 @@ const cylinder = startSketchOn('-XZ')
},
33,
39,
module_id,
),
type_: Some(FnArgType::Array(FnArgPrimitive::String)),
optional: false,
@ -3301,6 +3328,7 @@ const cylinder = startSketchOn('-XZ')
},
51,
55,
module_id,
),
type_: Some(FnArgType::Primitive(FnArgPrimitive::String)),
optional: true,
@ -3349,6 +3377,7 @@ const cylinder = startSketchOn('-XZ')
},
start: 0,
end: 0,
module_id: ModuleId::default(),
},
return_type: None,
digest: None,
@ -3375,6 +3404,7 @@ const cylinder = startSketchOn('-XZ')
},
start: 0,
end: 0,
module_id: ModuleId::default(),
},
return_type: None,
digest: None,
@ -3412,6 +3442,7 @@ const cylinder = startSketchOn('-XZ')
},
start: 0,
end: 0,
module_id: ModuleId::default(),
},
return_type: None,
digest: None,
@ -3429,9 +3460,7 @@ const cylinder = startSketchOn('-XZ')
#[tokio::test(flavor = "multi_thread")]
async fn test_parse_object_bool() {
let some_program_string = r#"some_func({thing: true, other_thing: false})"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
// We want to get the bool and verify it is a bool.
@ -3479,14 +3508,12 @@ const cylinder = startSketchOn('-XZ')
|> startProfileAt([0, 0], %)
|> line([5, 5], %, $xLine)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let result = crate::parser::top_level_parse(some_program_string);
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([76, 82])], message: "Cannot assign a tag to a reserved keyword: xLine" }"#
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([76, 82, 0])], message: "Cannot assign a tag to a reserved keyword: xLine" }"#
);
}
@ -3496,14 +3523,12 @@ const cylinder = startSketchOn('-XZ')
|> startProfileAt([0, 0], %)
|> line([5, 5], %, $)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let result = crate::parser::top_level_parse(some_program_string);
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([57, 59])], message: "Unexpected token: |>" }"#
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([57, 59, 0])], message: "Unexpected token: |>" }"#
);
}
@ -3513,17 +3538,13 @@ const cylinder = startSketchOn('-XZ')
|> startProfileAt([0, 0], %)
|> line([5, 5], %)
"#;
let prog1_tokens = crate::token::lexer(prog1_string).unwrap();
let prog1_parser = crate::parser::Parser::new(prog1_tokens);
let prog1_digest = prog1_parser.ast().unwrap().compute_digest();
let prog1_digest = crate::parser::top_level_parse(prog1_string).unwrap().compute_digest();
let prog2_string = r#"startSketchOn('XY')
|> startProfileAt([0, 2], %)
|> line([5, 5], %)
"#;
let prog2_tokens = crate::token::lexer(prog2_string).unwrap();
let prog2_parser = crate::parser::Parser::new(prog2_tokens);
let prog2_digest = prog2_parser.ast().unwrap().compute_digest();
let prog2_digest = crate::parser::top_level_parse(prog2_string).unwrap().compute_digest();
assert!(prog1_digest != prog2_digest);
@ -3531,9 +3552,7 @@ const cylinder = startSketchOn('-XZ')
|> startProfileAt([0, 0], %)
|> line([5, 5], %)
"#;
let prog3_tokens = crate::token::lexer(prog3_string).unwrap();
let prog3_parser = crate::parser::Parser::new(prog3_tokens);
let prog3_digest = prog3_parser.ast().unwrap().compute_digest();
let prog3_digest = crate::parser::top_level_parse(prog3_string).unwrap().compute_digest();
assert_eq!(prog1_digest, prog3_digest);
}

View File

@ -50,7 +50,7 @@ impl Node<IfExpression> {
impl Node<ElseIf> {
#[allow(dead_code)]
fn source_ranges(&self) -> Vec<SourceRange> {
vec![SourceRange([self.start, self.end])]
vec![SourceRange([self.start, self.end, self.module_id.as_usize()])]
}
}

View File

@ -235,7 +235,7 @@ pub(crate) async fn execute_pipe_body(
// they use the % from the parent. After all, this pipe expression hasn't been executed yet, so it doesn't have any % value
// of its own.
let meta = Metadata {
source_range: SourceRange([first.start(), first.end()]),
source_range: SourceRange::from(first),
};
let output = ctx
.execute_expr(first, exec_state, &meta, StatementKind::Expression)
@ -285,7 +285,7 @@ async fn inner_execute_pipe_body(
| Expr::None(_) => {}
};
let metadata = Metadata {
source_range: SourceRange([expression.start(), expression.end()]),
source_range: SourceRange::from(expression),
};
let output = ctx
.execute_expr(expression, exec_state, &metadata, StatementKind::Expression)

View File

@ -6,9 +6,11 @@ use serde::{Deserialize, Serialize};
use crate::{
ast::types::ConstraintLevel,
executor::{KclValue, SourceRange, UserVal},
executor::{KclValue, UserVal},
};
use super::Node;
const KCL_NONE_ID: &str = "KCL_NONE_ID";
/// KCL value for an optional parameter which was not given an argument.
@ -19,9 +21,6 @@ const KCL_NONE_ID: &str = "KCL_NONE_ID";
#[ts(export)]
#[serde(tag = "type")]
pub struct KclNone {
// TODO: Convert this to be an Option<SourceRange>.
pub start: usize,
pub end: usize,
#[serde(deserialize_with = "deser_private")]
#[ts(skip)]
#[schemars(skip)]
@ -29,12 +28,8 @@ pub struct KclNone {
}
impl KclNone {
pub fn new(start: usize, end: usize) -> Self {
Self {
start,
end,
__private: Private {},
}
pub fn new() -> Self {
Self { __private: Private {} }
}
}
@ -63,12 +58,6 @@ where
}
}
impl From<&KclNone> for SourceRange {
fn from(v: &KclNone) -> Self {
Self([v.start, v.end])
}
}
impl From<&KclNone> for UserVal {
fn from(none: &KclNone) -> Self {
UserVal {
@ -85,16 +74,18 @@ impl From<&KclNone> for KclValue {
}
}
impl KclNone {
pub fn source_range(&self) -> SourceRange {
SourceRange([self.start, self.end])
impl From<&Node<KclNone>> for KclValue {
fn from(none: &Node<KclNone>) -> Self {
Self::from(&none.inner)
}
}
impl Node<KclNone> {
/// Get the constraint level.
/// KCL None is never constrained.
pub fn get_constraint_level(&self) -> ConstraintLevel {
ConstraintLevel::None {
source_ranges: vec![self.source_range()],
source_ranges: self.as_source_ranges(),
}
}
}

View File

@ -0,0 +1,66 @@
use super::{BinaryPart, BodyItem, Expr, LiteralIdentifier, MemberObject, ModuleId};
impl BodyItem {
    /// The ID of the source module this body item was parsed from.
    /// Every variant carries its own `module_id`; this just dispatches on the variant.
    pub fn module_id(&self) -> ModuleId {
        match self {
            BodyItem::ImportStatement(item) => item.module_id,
            BodyItem::ExpressionStatement(item) => item.module_id,
            BodyItem::VariableDeclaration(item) => item.module_id,
            BodyItem::ReturnStatement(item) => item.module_id,
        }
    }
}
impl Expr {
    /// The ID of the source module this expression was parsed from.
    /// Every variant carries its own `module_id`; this just dispatches on the variant.
    pub fn module_id(&self) -> ModuleId {
        match self {
            Expr::Literal(e) => e.module_id,
            Expr::Identifier(e) => e.module_id,
            Expr::TagDeclarator(e) => e.module_id,
            Expr::BinaryExpression(e) => e.module_id,
            Expr::FunctionExpression(e) => e.module_id,
            Expr::CallExpression(e) => e.module_id,
            Expr::PipeExpression(e) => e.module_id,
            Expr::PipeSubstitution(e) => e.module_id,
            Expr::ArrayExpression(e) => e.module_id,
            Expr::ArrayRangeExpression(e) => e.module_id,
            Expr::ObjectExpression(e) => e.module_id,
            Expr::MemberExpression(e) => e.module_id,
            Expr::UnaryExpression(e) => e.module_id,
            Expr::IfExpression(e) => e.module_id,
            Expr::None(e) => e.module_id,
        }
    }
}
impl BinaryPart {
    /// The ID of the source module this operand was parsed from.
    /// Every variant carries its own `module_id`; this just dispatches on the variant.
    pub fn module_id(&self) -> ModuleId {
        match self {
            BinaryPart::Literal(part) => part.module_id,
            BinaryPart::Identifier(part) => part.module_id,
            BinaryPart::BinaryExpression(part) => part.module_id,
            BinaryPart::CallExpression(part) => part.module_id,
            BinaryPart::UnaryExpression(part) => part.module_id,
            BinaryPart::MemberExpression(part) => part.module_id,
            BinaryPart::IfExpression(part) => part.module_id,
        }
    }
}
impl MemberObject {
    /// The ID of the source module this member object was parsed from.
    /// Both variants carry their own `module_id`; this just dispatches on the variant.
    pub fn module_id(&self) -> ModuleId {
        match self {
            MemberObject::MemberExpression(obj) => obj.module_id,
            MemberObject::Identifier(obj) => obj.module_id,
        }
    }
}
impl LiteralIdentifier {
    /// The ID of the source module this literal-or-identifier was parsed from.
    /// Both variants carry their own `module_id`; this just dispatches on the variant.
    pub fn module_id(&self) -> ModuleId {
        match self {
            LiteralIdentifier::Identifier(node) => node.module_id,
            LiteralIdentifier::Literal(node) => node.module_id,
        }
    }
}

View File

@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
use thiserror::Error;
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
use crate::{executor::SourceRange, lsp::IntoDiagnostic};
use crate::{ast::types::ModuleId, executor::SourceRange, lsp::IntoDiagnostic};
#[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS, Clone, PartialEq, Eq)]
#[ts(export)]
@ -147,6 +147,13 @@ impl IntoDiagnostic for KclError {
let message = self.get_message();
let source_ranges = self.source_ranges();
// Limit to only errors in the top-level file.
let module_id = ModuleId::default();
let source_ranges = source_ranges
.iter()
.filter(|r| r.module_id() == module_id)
.collect::<Vec<_>>();
Diagnostic {
range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
severity: Some(self.severity()),

View File

@ -7,6 +7,7 @@ use std::{
use anyhow::Result;
use async_recursion::async_recursion;
use indexmap::IndexMap;
use kcmc::{
each_cmd as mcmd,
ok_response::{output::TakeSnapshot, OkModelingCmdResponse},
@ -26,8 +27,8 @@ type Point3D = kcmc::shared::Point3d<f64>;
use crate::{
ast::types::{
human_friendly_type, BodyItem, Expr, FunctionExpression, ItemVisibility, KclNone, Node, NodeRef, Program,
TagDeclarator, TagNode,
human_friendly_type, BodyItem, Expr, FunctionExpression, ItemVisibility, KclNone, ModuleId, Node, NodeRef,
Program, TagDeclarator, TagNode,
},
engine::{EngineManager, ExecutionKind},
errors::{KclError, KclErrorDetails},
@ -55,11 +56,32 @@ pub struct ExecState {
/// The stack of import statements for detecting circular module imports.
/// If this is empty, we're not currently executing an import statement.
pub import_stack: Vec<std::path::PathBuf>,
/// Map from source file absolute path to module ID.
pub path_to_source_id: IndexMap<std::path::PathBuf, ModuleId>,
/// Map from module ID to module info.
pub module_infos: IndexMap<ModuleId, ModuleInfo>,
/// The directory of the current project. This is used for resolving import
/// paths. If None is given, the current working directory is used.
pub project_directory: Option<String>,
}
impl ExecState {
    /// Intern `path`, returning its module ID.
    ///
    /// If the path has been seen before, the existing ID is returned and
    /// nothing is modified. Otherwise a fresh ID is allocated (IDs are dense:
    /// the new ID equals the number of modules registered so far, so the
    /// top-level module registered first gets ID 0) and a `ModuleInfo` entry
    /// is recorded for it.
    pub fn add_module(&mut self, path: std::path::PathBuf) -> ModuleId {
        // Fast path: already interned. Avoids cloning `path` on a hit,
        // which the previous entry-API version did unconditionally.
        if let Some(id) = self.path_to_source_id.get(&path) {
            return *id;
        }
        // New module: ID is the map length *before* insertion, keeping IDs dense.
        let id = ModuleId::from_usize(self.path_to_source_id.len());
        self.path_to_source_id.insert(path.clone(), id);
        self.module_infos.insert(id, ModuleInfo { id, path });
        id
    }
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
@ -1373,21 +1395,33 @@ pub enum BodyType {
Block,
}
/// Info about a module. Right now, this is pretty minimal. We hope to cache
/// modules here in the future.
#[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize, Serialize, ts_rs::TS, JsonSchema)]
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
#[ts(export)]
pub struct ModuleInfo {
/// The ID of the module.
id: ModuleId,
/// Absolute path of the module's source file.
path: std::path::PathBuf,
}
#[derive(Debug, Default, Deserialize, Serialize, PartialEq, Copy, Clone, ts_rs::TS, JsonSchema, Hash, Eq)]
#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
#[ts(export)]
pub struct SourceRange(#[ts(type = "[number, number]")] pub [usize; 2]);
pub struct SourceRange(#[ts(type = "[number, number]")] pub [usize; 3]);
impl From<[usize; 2]> for SourceRange {
fn from(value: [usize; 2]) -> Self {
impl From<[usize; 3]> for SourceRange {
fn from(value: [usize; 3]) -> Self {
Self(value)
}
}
impl SourceRange {
/// Create a new source range.
pub fn new(start: usize, end: usize) -> Self {
Self([start, end])
pub fn new(start: usize, end: usize, module_id: ModuleId) -> Self {
Self([start, end, module_id.as_usize()])
}
/// Get the start of the range.
@ -1400,6 +1434,11 @@ impl SourceRange {
self.0[1]
}
/// Get the module ID of the range.
pub fn module_id(&self) -> ModuleId {
ModuleId::from_usize(self.0[2])
}
/// Check if the range contains a position.
pub fn contains(&self, pos: usize) -> bool {
pos >= self.start() && pos <= self.end()
@ -1533,7 +1572,7 @@ impl From<SourceRange> for Metadata {
impl<T> From<NodeRef<'_, T>> for Metadata {
fn from(node: NodeRef<'_, T>) -> Self {
Self {
source_range: SourceRange::new(node.start, node.end),
source_range: SourceRange::new(node.start, node.end, node.module_id),
}
}
}
@ -2171,6 +2210,8 @@ impl ExecutorContext {
project_directory,
..Default::default()
};
// TODO: Use the top-level file's path.
exec_state.add_module(std::path::PathBuf::from(""));
// Before we even start executing the program, set the units.
self.engine
.batch_modeling_cmd(
@ -2210,6 +2251,13 @@ impl ExecutorContext {
BodyItem::ImportStatement(import_stmt) => {
let source_range = SourceRange::from(import_stmt);
let path = import_stmt.path.clone();
// Empty path is used by the top-level module.
if path.is_empty() {
return Err(KclError::Semantic(KclErrorDetails {
message: "import path cannot be empty".to_owned(),
source_ranges: vec![source_range],
}));
}
let resolved_path = if let Some(project_dir) = &exec_state.project_directory {
std::path::PathBuf::from(project_dir).join(&path)
} else {
@ -2230,8 +2278,9 @@ impl ExecutorContext {
source_ranges: vec![import_stmt.into()],
}));
}
let module_id = exec_state.add_module(resolved_path.clone());
let source = self.fs.read_to_string(&resolved_path, source_range).await?;
let program = crate::parser::parse(&source)?;
let program = crate::parser::parse(&source, module_id)?;
let (module_memory, module_exports) = {
exec_state.import_stack.push(resolved_path.clone());
let original_execution = self.engine.replace_execution_kind(ExecutionKind::Isolated);
@ -2359,7 +2408,7 @@ impl ExecutorContext {
// True here tells the engine to flush all the end commands as well like fillets
// and chamfers where the engine would otherwise eat the ID of the segments.
true,
SourceRange([program.end, program.end]),
SourceRange([program.end, program.end, program.module_id.as_usize()]),
)
.await?;
}
@ -2525,7 +2574,12 @@ fn assign_args_to_params(
if param.optional {
// If the corresponding parameter is optional,
// then it's fine, the user doesn't need to supply it.
let none = KclNone::new(param.identifier.start, param.identifier.end);
let none = Node {
inner: KclNone::new(),
start: param.identifier.start,
end: param.identifier.end,
module_id: param.identifier.module_id,
};
fn_memory.add(
&param.identifier.name,
KclValue::from(&none),
@ -2586,9 +2640,8 @@ mod tests {
use crate::ast::types::{Identifier, Node, Parameter};
pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
let tokens = crate::token::lexer(code)?;
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast()?;
let program = crate::parser::top_level_parse(code)?;
let ctx = ExecutorContext {
engine: Arc::new(Box::new(crate::engine::conn_mock::EngineConnection::new().await?)),
fs: Arc::new(crate::fs::FileManager::new()),
@ -3027,7 +3080,7 @@ const answer = returnX()"#;
err,
KclError::UndefinedValue(KclErrorDetails {
message: "memory item key `x` is not defined".to_owned(),
source_ranges: vec![SourceRange([64, 65]), SourceRange([97, 106])],
source_ranges: vec![SourceRange([64, 65, 0]), SourceRange([97, 106, 0])],
}),
);
}
@ -3062,7 +3115,7 @@ let shape = layer() |> patternTransform(10, transform, %)
err,
KclError::UndefinedValue(KclErrorDetails {
message: "memory item key `x` is not defined".to_owned(),
source_ranges: vec![SourceRange([80, 81])],
source_ranges: vec![SourceRange([80, 81, 0])],
}),
);
}
@ -3317,7 +3370,7 @@ let notNull = !myNull
parse_execute(code1).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Semantic(KclErrorDetails {
message: "Cannot apply unary operator ! to non-boolean value: null".to_owned(),
source_ranges: vec![SourceRange([56, 63])],
source_ranges: vec![SourceRange([56, 63, 0])],
})
);
@ -3326,7 +3379,7 @@ let notNull = !myNull
parse_execute(code2).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Semantic(KclErrorDetails {
message: "Cannot apply unary operator ! to non-boolean value: 0".to_owned(),
source_ranges: vec![SourceRange([14, 16])],
source_ranges: vec![SourceRange([14, 16, 0])],
})
);
@ -3337,7 +3390,7 @@ let notEmptyString = !""
parse_execute(code3).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Semantic(KclErrorDetails {
message: "Cannot apply unary operator ! to non-boolean value: \"\"".to_owned(),
source_ranges: vec![SourceRange([22, 25])],
source_ranges: vec![SourceRange([22, 25, 0])],
})
);
@ -3349,7 +3402,7 @@ let notMember = !obj.a
parse_execute(code4).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Semantic(KclErrorDetails {
message: "Cannot apply unary operator ! to non-boolean value: 1".to_owned(),
source_ranges: vec![SourceRange([36, 42])],
source_ranges: vec![SourceRange([36, 42, 0])],
})
);
@ -3360,7 +3413,7 @@ let notArray = !a";
parse_execute(code5).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Semantic(KclErrorDetails {
message: "Cannot apply unary operator ! to non-boolean value: []".to_owned(),
source_ranges: vec![SourceRange([27, 29])],
source_ranges: vec![SourceRange([27, 29, 0])],
})
);
@ -3371,7 +3424,7 @@ let notObject = !x";
parse_execute(code6).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Semantic(KclErrorDetails {
message: "Cannot apply unary operator ! to non-boolean value: {}".to_owned(),
source_ranges: vec![SourceRange([28, 30])],
source_ranges: vec![SourceRange([28, 30, 0])],
})
);
@ -3424,7 +3477,7 @@ let notTagIdentifier = !myTag";
parse_execute(code10).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Syntax(KclErrorDetails {
message: "Unexpected token: !".to_owned(),
source_ranges: vec![SourceRange([14, 15])],
source_ranges: vec![SourceRange([14, 15, 0])],
})
);
@ -3437,7 +3490,7 @@ let notPipeSub = 1 |> identity(!%))";
parse_execute(code11).await.unwrap_err().downcast::<KclError>().unwrap(),
KclError::Syntax(KclErrorDetails {
message: "Unexpected token: |>".to_owned(),
source_ranges: vec![SourceRange([54, 56])],
source_ranges: vec![SourceRange([54, 56, 0])],
})
);
@ -3483,7 +3536,7 @@ test([0, 0])
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
r#"undefined value: KclErrorDetails { source_ranges: [SourceRange([10, 34])], message: "Result of user-defined function test is undefined" }"#.to_owned()
r#"undefined value: KclErrorDetails { source_ranges: [SourceRange([10, 34, 0])], message: "Result of user-defined function test is undefined" }"#.to_owned()
);
}
@ -3600,7 +3653,7 @@ let w = f() + f()
vec![req_param("x")],
vec![],
Err(KclError::Semantic(KclErrorDetails {
source_ranges: vec![SourceRange([0, 0])],
source_ranges: vec![SourceRange([0, 0, 0])],
message: "Expected 1 arguments, got 0".to_owned(),
})),
),
@ -3618,7 +3671,7 @@ let w = f() + f()
vec![req_param("x"), opt_param("y")],
vec![],
Err(KclError::Semantic(KclErrorDetails {
source_ranges: vec![SourceRange([0, 0])],
source_ranges: vec![SourceRange([0, 0, 0])],
message: "Expected 1-2 arguments, got 0".to_owned(),
})),
),
@ -3645,7 +3698,7 @@ let w = f() + f()
vec![req_param("x"), opt_param("y")],
vec![mem(1), mem(2), mem(3)],
Err(KclError::Semantic(KclErrorDetails {
source_ranges: vec![SourceRange([0, 0])],
source_ranges: vec![SourceRange([0, 0, 0])],
message: "Expected 1-2 arguments, got 3".to_owned(),
})),
),
@ -3661,6 +3714,7 @@ let w = f() + f()
},
start: 0,
end: 0,
module_id: ModuleId::default(),
},
return_type: None,
digest: None,

View File

@ -29,7 +29,10 @@ fn lint_lower_camel_case_var(decl: &VariableDeclarator) -> Result<Vec<Discovered
let name = &ident.name;
if !name.is_case(convert_case::Case::Camel) {
findings.push(Z0001.at(format!("found '{}'", name), SourceRange::new(ident.start, ident.end)));
findings.push(Z0001.at(
format!("found '{}'", name),
SourceRange::new(ident.start, ident.end, ident.module_id),
));
return Ok(findings);
}
@ -42,7 +45,10 @@ fn lint_lower_camel_case_property(decl: &ObjectProperty) -> Result<Vec<Discovere
let name = &ident.name;
if !name.is_case(convert_case::Case::Camel) {
findings.push(Z0001.at(format!("found '{}'", name), SourceRange::new(ident.start, ident.end)));
findings.push(Z0001.at(
format!("found '{}'", name),
SourceRange::new(ident.start, ident.end, ident.module_id),
));
return Ok(findings);
}

View File

@ -144,7 +144,7 @@ pub fn lint_should_be_offset_plane(node: Node) -> Result<Vec<Discovered>> {
return Ok(vec![]);
};
let call_source_range = SourceRange::new(call.start, call.end);
let call_source_range = SourceRange::new(call.start, call.end, call.module_id);
Ok(vec![Z0003.at(
format!(
"custom plane in startSketchOn; offsetPlane from {} would work here",

View File

@ -28,7 +28,7 @@ fn lint_too_many_args_std_lib_function(
if exp.arguments.len() != 2 {
findings.push(Z0002.at(
format!("expected 2 arguments, found {}", exp.arguments.len()),
SourceRange::new(exp.start, exp.end),
SourceRange::new(exp.start, exp.end, exp.module_id),
));
}
return Ok(findings);
@ -38,7 +38,7 @@ fn lint_too_many_args_std_lib_function(
if exp.arguments.len() < 2 {
findings.push(Z0002.at(
format!("expected at least 2 arguments, found {}", exp.arguments.len()),
SourceRange::new(exp.start, exp.end),
SourceRange::new(exp.start, exp.end, exp.module_id),
));
}
return Ok(findings);
@ -48,7 +48,7 @@ fn lint_too_many_args_std_lib_function(
if exp.arguments.len() > fn_args_len {
findings.push(Z0002.at(
format!("expected {} arguments, found {}", fn_args_len, exp.arguments.len()),
SourceRange::new(exp.start, exp.end),
SourceRange::new(exp.start, exp.end, exp.module_id),
));
}

View File

@ -182,9 +182,7 @@ mod test {
macro_rules! assert_no_finding {
( $check:expr, $finding:expr, $kcl:expr ) => {
let tokens = $crate::token::lexer($kcl).unwrap();
let parser = $crate::parser::Parser::new(tokens);
let prog = parser.ast().unwrap();
let prog = $crate::parser::top_level_parse($kcl).unwrap();
for discovered_finding in prog.lint($check).unwrap() {
if discovered_finding.finding == $finding {
assert!(false, "Finding {:?} was emitted", $finding.code);
@ -195,9 +193,7 @@ mod test {
macro_rules! assert_finding {
( $check:expr, $finding:expr, $kcl:expr ) => {
let tokens = $crate::token::lexer($kcl).unwrap();
let parser = $crate::parser::Parser::new(tokens);
let prog = parser.ast().unwrap();
let prog = $crate::parser::top_level_parse($kcl).unwrap();
for discovered_finding in prog.lint($check).unwrap() {
if discovered_finding.finding == $finding {

View File

@ -40,7 +40,7 @@ use tower_lsp::{
};
use crate::{
ast::types::{Expr, Node, NodeRef, VariableKind},
ast::types::{Expr, ModuleId, Node, NodeRef, VariableKind},
executor::{IdGenerator, SourceRange},
lsp::{backend::Backend as _, util::IntoDiagnostic},
parser::PIPE_OPERATOR,
@ -188,7 +188,8 @@ impl crate::lsp::backend::Backend for Backend {
// We already updated the code map in the shared backend.
// Lets update the tokens.
let tokens = match crate::token::lexer(&params.text) {
let module_id = ModuleId::default();
let tokens = match crate::token::lexer(&params.text, module_id) {
Ok(tokens) => tokens,
Err(err) => {
self.add_to_diagnostics(&params, &[err], true).await;
@ -1235,7 +1236,8 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let Ok(tokens) = crate::token::lexer(current_code) else {
let module_id = ModuleId::default();
let Ok(tokens) = crate::token::lexer(current_code, module_id) else {
return Ok(None);
};
let parser = crate::parser::Parser::new(tokens);
@ -1251,7 +1253,7 @@ impl LanguageServer for Backend {
},
0,
);
let source_range = SourceRange([0, current_code.len()]);
let source_range = SourceRange::new(0, current_code.len(), module_id);
let range = source_range.to_lsp_range(current_code);
Ok(Some(vec![TextEdit {
new_text: recast,
@ -1272,7 +1274,8 @@ impl LanguageServer for Backend {
// Parse the ast.
// I don't know if we need to do this again since it should be updated in the context.
// But I figure better safe than sorry since this will write back out to the file.
let Ok(tokens) = crate::token::lexer(current_code) else {
let module_id = ModuleId::default();
let Ok(tokens) = crate::token::lexer(current_code, module_id) else {
return Ok(None);
};
let parser = crate::parser::Parser::new(tokens);
@ -1286,7 +1289,7 @@ impl LanguageServer for Backend {
ast.rename_symbol(&params.new_name, pos);
// Now recast it.
let recast = ast.recast(&Default::default(), 0);
let source_range = SourceRange([0, current_code.len() - 1]);
let source_range = SourceRange::new(0, current_code.len() - 1, module_id);
let range = source_range.to_lsp_range(current_code);
Ok(Some(WorkspaceEdit {
changes: Some(HashMap::from([(

View File

@ -1,5 +1,5 @@
use crate::{
ast::types::{Node, Program},
ast::types::{ModuleId, Node, Program},
errors::{KclError, KclErrorDetails},
executor::SourceRange,
token::{Token, TokenType},
@ -12,9 +12,15 @@ pub(crate) mod parser_impl;
pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
pub const PIPE_OPERATOR: &str = "|>";
/// Parse the given KCL code into an AST. This is the top-level.
pub fn top_level_parse(code: &str) -> Result<Node<Program>, KclError> {
let module_id = ModuleId::default();
parse(code, module_id)
}
/// Parse the given KCL code into an AST.
pub fn parse(code: &str) -> Result<Node<Program>, KclError> {
let tokens = crate::token::lexer(code)?;
pub fn parse(code: &str, module_id: ModuleId) -> Result<Node<Program>, KclError> {
let tokens = crate::token::lexer(code, module_id)?;
let parser = Parser::new(tokens);
parser.ast()
}

View File

@ -5,7 +5,8 @@ mod tests {
($func_name:ident, $test_kcl_program:expr) => {
#[test]
fn $func_name() {
if let Ok(v) = $crate::token::lexer($test_kcl_program) {
let module_id = $crate::parser::ModuleId::default();
if let Ok(v) = $crate::token::lexer($test_kcl_program, module_id) {
let _ = $crate::parser::Parser::new(v).ast();
}
}

View File

@ -30,6 +30,7 @@ fn evaluate(rpn: Vec<BinaryExpressionToken>) -> Result<Node<BinaryExpression>, K
};
let start = left.start();
let end = right.end();
let module_id = left.module_id();
BinaryPart::BinaryExpression(Node::boxed(
BinaryExpression {
@ -40,6 +41,7 @@ fn evaluate(rpn: Vec<BinaryExpressionToken>) -> Result<Node<BinaryExpression>, K
},
start,
end,
module_id,
))
}
BinaryExpressionToken::Operand(o) => o,
@ -60,11 +62,11 @@ fn source_range(tokens: &[BinaryExpressionToken]) -> Vec<SourceRange> {
.iter()
.filter_map(|op| match op {
BinaryExpressionToken::Operator(_) => None,
BinaryExpressionToken::Operand(o) => Some((o.start(), o.end())),
BinaryExpressionToken::Operand(o) => Some((o.start(), o.end(), o.module_id())),
})
.collect();
match (sources.first(), sources.last()) {
(Some((start, _)), Some((_, end))) => vec![SourceRange([*start, *end])],
(Some((start, _, module_id)), Some((_, end, _))) => vec![SourceRange([*start, *end, module_id.as_usize()])],
_ => Vec::new(),
}
}
@ -124,7 +126,7 @@ impl From<BinaryOperator> for BinaryExpressionToken {
#[cfg(test)]
mod tests {
use super::*;
use crate::ast::types::Literal;
use crate::ast::types::{Literal, ModuleId};
#[test]
fn parse_and_evaluate() {
@ -138,6 +140,7 @@ mod tests {
},
0,
0,
ModuleId::default(),
)))
}
let tests: Vec<Vec<BinaryExpressionToken>> = vec![
@ -158,6 +161,7 @@ mod tests {
},
0,
0,
ModuleId::default(),
))
.into(),
BinaryOperator::Pow.into(),

File diff suppressed because it is too large Load Diff

View File

@ -1,13 +1,12 @@
use winnow::{
error::{ErrorKind, ParseError, StrContext},
stream::Stream,
Located,
};
use crate::{
errors::{KclError, KclErrorDetails},
executor::SourceRange,
token::Token,
token::{Input, Token},
};
/// Accumulate context while backtracking errors
@ -20,9 +19,10 @@ pub struct ContextError<C = StrContext> {
pub cause: Option<KclError>,
}
impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
fn from(err: ParseError<Located<&str>, winnow::error::ContextError>) -> Self {
impl From<ParseError<Input<'_>, winnow::error::ContextError>> for KclError {
fn from(err: ParseError<Input<'_>, winnow::error::ContextError>) -> Self {
let (input, offset): (Vec<char>, usize) = (err.input().chars().collect(), err.offset());
let module_id = err.input().state.module_id;
if offset >= input.len() {
// From the winnow docs:
@ -31,7 +31,7 @@ impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
// the end of input (input.len()) on eof errors.
return KclError::Lexical(KclErrorDetails {
source_ranges: vec![SourceRange([offset, offset])],
source_ranges: vec![SourceRange([offset, offset, module_id.as_usize()])],
message: "unexpected EOF while parsing".to_string(),
});
}
@ -42,7 +42,7 @@ impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
// TODO: Add the Winnow parser context to the error.
// See https://github.com/KittyCAD/modeling-app/issues/784
KclError::Lexical(KclErrorDetails {
source_ranges: vec![SourceRange([offset, offset + 1])],
source_ranges: vec![SourceRange([offset, offset + 1, module_id.as_usize()])],
message: format!("found unknown token '{}'", bad_token),
})
}

View File

@ -1,5 +1,5 @@
use crate::{
ast::types::{Node, Program},
ast::types::{ModuleId, Node, Program},
errors::KclError,
parser::Parser,
token::Token,
@ -44,7 +44,7 @@ fn read(filename: &'static str, test_name: &str) -> String {
fn tokenize(test_name: &str) {
let input = read("input.kcl", test_name);
let token_res = crate::token::lexer(&input);
let token_res = crate::token::lexer(&input, ModuleId::default());
assert_snapshot(test_name, "Result of tokenizing", || {
insta::assert_json_snapshot!("tokens", token_res);

View File

@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize};
use crate::{
ast::types::{BodyItem, Expr, FunctionExpression, Node, Program},
docs::{StdLibFn, StdLibFnData},
token::lexer,
};
pub trait KclStdLibFn: StdLibFn {
@ -83,8 +82,7 @@ impl Serialize for Box<dyn KclStdLibFn> {
/// Return the program and its single function.
/// Return None if those expectations aren't met.
pub fn extract_function(source: &str) -> Option<(Node<Program>, crate::ast::types::BoxNode<FunctionExpression>)> {
let tokens = lexer(source).unwrap();
let src = crate::parser::Parser::new(tokens).ast().ok()?;
let src = crate::parser::top_level_parse(source).ok()?;
assert_eq!(src.body.len(), 1);
let BodyItem::ExpressionStatement(expr) = src.body.last()? else {
panic!("expected expression statement");

View File

@ -17,9 +17,7 @@ pub struct RequestBody {
/// This returns the bytes of the snapshot.
pub async fn execute_and_snapshot(code: &str, units: UnitLength) -> anyhow::Result<image::DynamicImage> {
let ctx = new_context(units, true).await?;
let tokens = crate::token::lexer(code)?;
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast()?;
let program = crate::parser::top_level_parse(code)?;
do_execute_and_snapshot(&ctx, program).await.map(|(_state, snap)| snap)
}
@ -37,9 +35,7 @@ pub async fn execute_and_snapshot_ast(
pub async fn execute_and_snapshot_no_auth(code: &str, units: UnitLength) -> anyhow::Result<image::DynamicImage> {
let ctx = new_context(units, false).await?;
let tokens = crate::token::lexer(code)?;
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast()?;
let program = crate::parser::top_level_parse(code)?;
do_execute_and_snapshot(&ctx, program).await.map(|(_state, snap)| snap)
}

View File

@ -8,13 +8,16 @@ use tower_lsp::lsp_types::SemanticTokenType;
use winnow::stream::ContainsToken;
use crate::{
ast::types::{ItemVisibility, VariableKind},
ast::types::{ItemVisibility, ModuleId, VariableKind},
errors::KclError,
executor::SourceRange,
};
mod tokeniser;
// Re-export
pub use tokeniser::Input;
/// The types of tokens.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
#[cfg_attr(feature = "pyo3", pyo3::pyclass(eq, eq_int))]
@ -161,6 +164,8 @@ pub struct Token {
pub start: usize,
/// Offset in the source code where this token ends.
pub end: usize,
#[serde(default, skip_serializing_if = "ModuleId::is_top_level")]
pub module_id: ModuleId,
pub value: String,
}
@ -177,10 +182,16 @@ impl ContainsToken<Token> for TokenType {
}
impl Token {
pub fn from_range(range: std::ops::Range<usize>, token_type: TokenType, value: String) -> Self {
pub fn from_range(
range: std::ops::Range<usize>,
module_id: ModuleId,
token_type: TokenType,
value: String,
) -> Self {
Self {
start: range.start,
end: range.end,
module_id,
value,
token_type,
}
@ -193,7 +204,7 @@ impl Token {
}
pub fn as_source_range(&self) -> SourceRange {
SourceRange([self.start, self.end])
SourceRange([self.start, self.end, self.module_id.as_usize()])
}
pub fn as_source_ranges(&self) -> Vec<SourceRange> {
@ -227,18 +238,18 @@ impl Token {
impl From<Token> for SourceRange {
fn from(token: Token) -> Self {
Self([token.start, token.end])
Self([token.start, token.end, token.module_id.as_usize()])
}
}
impl From<&Token> for SourceRange {
fn from(token: &Token) -> Self {
Self([token.start, token.end])
Self([token.start, token.end, token.module_id.as_usize()])
}
}
pub fn lexer(s: &str) -> Result<Vec<Token>, KclError> {
tokeniser::lexer(s).map_err(From::from)
pub fn lexer(s: &str, module_id: ModuleId) -> Result<Vec<Token>, KclError> {
tokeniser::lexer(s, module_id).map_err(From::from)
}
#[cfg(test)]

File diff suppressed because it is too large Load Diff

View File

@ -573,7 +573,7 @@ impl FunctionExpression {
mod tests {
use pretty_assertions::assert_eq;
use crate::ast::types::FormatOptions;
use crate::ast::types::{FormatOptions, ModuleId};
#[test]
fn test_recast_if_else_if_same() {
@ -585,9 +585,7 @@ mod tests {
5
}
"#;
let tokens = crate::token::lexer(input).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(input).unwrap();
let output = program.recast(&Default::default(), 0);
assert_eq!(output, input);
}
@ -600,9 +598,7 @@ mod tests {
5
}
"#;
let tokens = crate::token::lexer(input).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(input).unwrap();
let output = program.recast(&Default::default(), 0);
assert_eq!(output, input);
}
@ -616,9 +612,7 @@ import a as aaa, b from "a.kcl"
import a, b as bbb from "a.kcl"
import a as aaa, b as bbb from "a.kcl"
"#;
let tokens = crate::token::lexer(input).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(input).unwrap();
let output = program.recast(&Default::default(), 0);
assert_eq!(output, input);
}
@ -627,7 +621,7 @@ import a as aaa, b as bbb from "a.kcl"
fn test_recast_import_as_same_name() {
let input = r#"import a as a from "a.kcl"
"#;
let program = crate::parser::parse(input).unwrap();
let program = crate::parser::top_level_parse(input).unwrap();
let output = program.recast(&Default::default(), 0);
let expected = r#"import a from "a.kcl"
"#;
@ -640,9 +634,7 @@ import a as aaa, b as bbb from "a.kcl"
return 0
}
"#;
let tokens = crate::token::lexer(input).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(input).unwrap();
let output = program.recast(&Default::default(), 0);
assert_eq!(output, input);
}
@ -765,9 +757,7 @@ fn zoo = (x0, y0) => {
zoo(zoo_x, zoo_y)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
@ -836,9 +826,7 @@ outsideRevolve = startSketchOn('XZ')
|> line([overHangLength - thickness, 0], %)
|> close(%)
|> revolve({ axis: 'y' }, %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -914,9 +902,7 @@ outsideRevolve = startSketchOn('XZ')
let some_program_string = r#"bing = { yo: 55 }
myNestedVar = [{ prop: callExp(bing.yo) }]
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
@ -927,9 +913,7 @@ myNestedVar = [{ prop: callExp(bing.yo) }]
let some_program_string = r#"bing = { yo: 55 }
myNestedVar = [callExp(bing.yo)]
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
@ -941,9 +925,7 @@ myNestedVar = [callExp(bing.yo)]
ten = 10
bar = [0 + 1 .. ten]
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
@ -957,9 +939,7 @@ bar = [0 + 1 .. ten]
thing ( 1 )
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -982,9 +962,7 @@ myNestedVar = [
}
]
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1000,9 +978,7 @@ myNestedVar = [
#[test]
fn test_recast_empty_file() {
let some_program_string = r#""#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
// Its VERY important this comes back with zero new lines.
@ -1013,9 +989,7 @@ myNestedVar = [
fn test_recast_empty_file_new_line() {
let some_program_string = r#"
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
// Its VERY important this comes back with zero new lines.
@ -1026,14 +1000,12 @@ myNestedVar = [
fn test_recast_shebang_only() {
let some_program_string = r#"#!/usr/local/env zoo kcl"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let result = parser.ast();
let result = crate::parser::top_level_parse(some_program_string);
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([21, 24])], message: "Unexpected end of file. The compiler expected a function body items (functions are made up of variable declarations, expressions, and return statements, each of those is a possible body item" }"#
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([21, 24, 0])], message: "Unexpected end of file. The compiler expected a function body items (functions are made up of variable declarations, expressions, and return statements, each of those is a possible body item" }"#
);
}
@ -1048,9 +1020,7 @@ part001 = startSketchOn('XY')
|> close(%)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1081,9 +1051,7 @@ part001 = startSketchOn('XY')
|> close(%)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1113,9 +1081,7 @@ part001 = startSketchOn('XY')
|> close(%)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1253,9 +1219,7 @@ tabs_l = startSketchOn({
distance: length - 10
}, %)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
// Its VERY important this comes back with zero new lines.
@ -1393,9 +1357,7 @@ tabs_l = startSketchOn({
|> close(%)
|> extrude(scale, %)
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1419,9 +1381,7 @@ tabs_l = startSketchOn({
|> startProfileAt([0.0, 5.0], %)
|> line([0.4900857016, -0.0240763666], %)
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1440,9 +1400,7 @@ tabs_l = startSketchOn({
|> startProfileAt([0.0, 5.0], %)
|> line([0.4900857016, -0.0240763666], %) // hello world
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1461,9 +1419,7 @@ tabs_l = startSketchOn({
|> line([0.4900857016, -0.0240763666], %)
// hello world
|> line([0.6804562304, 0.9087880491], %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1488,9 +1444,7 @@ tabs_l = startSketchOn({
// this is also a comment
return things
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1514,9 +1468,7 @@ tabs_l = startSketchOn({
// this is also a comment
thing = 'foo'
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1537,9 +1489,7 @@ key = 'c'
// hello
thing = 'foo'
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1567,9 +1517,7 @@ thing = 'c'
foo = 'bar' //
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1595,9 +1543,7 @@ foo = 'bar' //
// hello
thing = 'foo'
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1616,9 +1562,7 @@ thing = 'foo'
/* comment at start */
mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(test_program).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1648,9 +1592,7 @@ mySk1 = startSketchOn('XY')
|> ry(45, %)
|> rx(45, %)
// one more for good measure"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1686,9 +1628,7 @@ mySk1 = startSketchOn('XY')
intersectTag: seg01
}, %)
|> line([-0.42, -1.72], %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string);
@ -1712,9 +1652,7 @@ yo = [
" hey oooooo really long long long"
]
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted, some_program_string);
@ -1730,9 +1668,7 @@ key = 'c'
things = "things"
// this is also a comment"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
let expected = some_program_string.trim();
@ -1751,9 +1687,7 @@ things = "things"
// a comment
"
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string.trim());
@ -1777,9 +1711,7 @@ part001 = startSketchOn('XY')
-angleToMatchLengthY(seg01, myVar, %),
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string);
@ -1804,9 +1736,7 @@ part001 = startSketchOn('XY')
myVar
], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(
&FormatOptions {
@ -1835,9 +1765,7 @@ fn ghi = (part001) => {
return part001
}
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
let mut program = crate::parser::top_level_parse(some_program_string).unwrap();
program.rename_symbol("mySuperCoolPart", 6);
let recasted = program.recast(&Default::default(), 0);
@ -1865,9 +1793,7 @@ fn ghi = (part001) => {
let some_program_string = r#"fn ghi = (x, y, z) => {
return x
}"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let mut program = parser.ast().unwrap();
let mut program = crate::parser::top_level_parse(some_program_string).unwrap();
program.rename_symbol("newName", 10);
let recasted = program.recast(&Default::default(), 0);
@ -1889,9 +1815,7 @@ fn ghi = (part001) => {
angle_start: 0,
angle_end: 180,
}, %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1921,9 +1845,7 @@ firstExtrude = startSketchOn('XY')
|> close(%)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1960,9 +1882,7 @@ firstExtrude = startSketchOn('XY')
|> close(%)
|> extrude(h, %)
"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
@ -1988,9 +1908,7 @@ firstExtrude = startSketchOn('XY')
#[tokio::test(flavor = "multi_thread")]
async fn test_recast_math_start_negative() {
let some_program_string = r#"myVar = -5 + 6"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string);
@ -2007,9 +1925,7 @@ startSketchOn('XY')
|> line([0, -(5 - thickness)], %)
|> line([0, -(5 - 1)], %)
|> line([0, -(-5 - 1)], %)"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string);
@ -2023,9 +1939,7 @@ FOS = 2
sigmaAllow = 8
width = 20
thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string);
@ -2034,9 +1948,7 @@ thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
#[tokio::test(flavor = "multi_thread")]
async fn no_vardec_keyword() {
let some_program_string = r#"distance = 5"#;
let tokens = crate::token::lexer(some_program_string).unwrap();
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let program = crate::parser::top_level_parse(some_program_string).unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(recasted.trim(), some_program_string);
@ -2066,7 +1978,7 @@ thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(raw).unwrap();
let tokens = crate::token::lexer(raw, ModuleId::default()).unwrap();
let literal = crate::parser::parser_impl::unsigned_number_literal
.parse(&tokens)
.unwrap();
@ -2099,9 +2011,7 @@ sketch002 = startSketchOn({
}
})
"#;
let tokens = crate::token::lexer(input).unwrap();
let p = crate::parser::Parser::new(tokens);
let ast = p.ast().unwrap();
let ast = crate::parser::top_level_parse(input).unwrap();
let actual = ast.recast(&FormatOptions::new(), 0);
assert_eq!(actual, expected);
}
@ -2127,7 +2037,7 @@ sketch002 = startSketchOn({
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(input).unwrap();
let tokens = crate::token::lexer(input, ModuleId::default()).unwrap();
crate::parser::parser_impl::print_tokens(&tokens);
let expr = crate::parser::parser_impl::object.parse(&tokens).unwrap();
assert_eq!(
@ -2225,7 +2135,7 @@ sketch002 = startSketchOn({
.into_iter()
.enumerate()
{
let tokens = crate::token::lexer(input).unwrap();
let tokens = crate::token::lexer(input, ModuleId::default()).unwrap();
let expr = crate::parser::parser_impl::array_elem_by_elem.parse(&tokens).unwrap();
assert_eq!(
expr.recast(&FormatOptions::new(), 0, false),

View File

@ -42,30 +42,30 @@ pub enum Node<'a> {
impl From<&Node<'_>> for SourceRange {
fn from(node: &Node) -> Self {
match node {
Node::Program(p) => SourceRange([p.start, p.end]),
Node::ImportStatement(e) => SourceRange([e.start, e.end]),
Node::ExpressionStatement(e) => SourceRange([e.start, e.end]),
Node::VariableDeclaration(v) => SourceRange([v.start, v.end]),
Node::ReturnStatement(r) => SourceRange([r.start, r.end]),
Node::VariableDeclarator(v) => SourceRange([v.start, v.end]),
Node::Literal(l) => SourceRange([l.start, l.end]),
Node::TagDeclarator(t) => SourceRange([t.start, t.end]),
Node::Identifier(i) => SourceRange([i.start, i.end]),
Node::BinaryExpression(b) => SourceRange([b.start, b.end]),
Node::FunctionExpression(f) => SourceRange([f.start, f.end]),
Node::CallExpression(c) => SourceRange([c.start, c.end]),
Node::PipeExpression(p) => SourceRange([p.start, p.end]),
Node::PipeSubstitution(p) => SourceRange([p.start, p.end]),
Node::ArrayExpression(a) => SourceRange([a.start, a.end]),
Node::ArrayRangeExpression(a) => SourceRange([a.start, a.end]),
Node::ObjectExpression(o) => SourceRange([o.start, o.end]),
Node::MemberExpression(m) => SourceRange([m.start, m.end]),
Node::UnaryExpression(u) => SourceRange([u.start, u.end]),
Node::Parameter(p) => SourceRange([p.identifier.start, p.identifier.end]),
Node::ObjectProperty(o) => SourceRange([o.start, o.end]),
Node::MemberObject(m) => SourceRange([m.start(), m.end()]),
Node::IfExpression(m) => SourceRange([m.start, m.end]),
Node::LiteralIdentifier(l) => SourceRange([l.start(), l.end()]),
Node::Program(n) => SourceRange::from(*n),
Node::ImportStatement(n) => SourceRange::from(*n),
Node::ExpressionStatement(n) => SourceRange::from(*n),
Node::VariableDeclaration(n) => SourceRange::from(*n),
Node::ReturnStatement(n) => SourceRange::from(*n),
Node::VariableDeclarator(n) => SourceRange::from(*n),
Node::Literal(n) => SourceRange::from(*n),
Node::TagDeclarator(n) => SourceRange::from(*n),
Node::Identifier(n) => SourceRange::from(*n),
Node::BinaryExpression(n) => SourceRange::from(*n),
Node::FunctionExpression(n) => SourceRange::from(*n),
Node::CallExpression(n) => SourceRange::from(*n),
Node::PipeExpression(n) => SourceRange::from(*n),
Node::PipeSubstitution(n) => SourceRange::from(*n),
Node::ArrayExpression(n) => SourceRange::from(*n),
Node::ArrayRangeExpression(n) => SourceRange::from(*n),
Node::ObjectExpression(n) => SourceRange::from(*n),
Node::MemberExpression(n) => SourceRange::from(*n),
Node::UnaryExpression(n) => SourceRange::from(*n),
Node::Parameter(p) => SourceRange::from(&p.identifier),
Node::ObjectProperty(n) => SourceRange::from(*n),
Node::MemberObject(m) => SourceRange([m.start(), m.end(), m.module_id().as_usize()]),
Node::IfExpression(n) => SourceRange::from(*n),
Node::LiteralIdentifier(l) => SourceRange([l.start(), l.end(), l.module_id().as_usize()]),
}
}
}

View File

@ -315,9 +315,7 @@ mod tests {
macro_rules! kcl {
( $kcl:expr ) => {{
let tokens = $crate::token::lexer($kcl).unwrap();
let parser = $crate::parser::Parser::new(tokens);
parser.ast().unwrap()
$crate::parser::top_level_parse($kcl).unwrap()
}};
}

View File

@ -123,7 +123,8 @@ snapshot_kind: text
{
"sourceRange": [
7,
32
32,
0
]
}
]
@ -136,7 +137,8 @@ snapshot_kind: text
{
"sourceRange": [
38,
834
834,
0
]
}
]

View File

@ -43,7 +43,8 @@ snapshot_kind: text
{
"sourceRange": [
6,
15
15,
0
]
}
]
@ -61,7 +62,8 @@ snapshot_kind: text
{
"sourceRange": [
27,
39
39,
0
]
}
]
@ -80,7 +82,8 @@ snapshot_kind: text
{
"sourceRange": [
51,
68
68,
0
]
}
]

View File

@ -39,7 +39,8 @@ snapshot_kind: text
{
"sourceRange": [
175,
188
188,
0
]
}
]
@ -52,7 +53,8 @@ snapshot_kind: text
{
"sourceRange": [
79,
80
80,
0
]
}
]
@ -71,7 +73,8 @@ snapshot_kind: text
{
"sourceRange": [
5,
11
11,
0
]
}
]
@ -90,7 +93,8 @@ snapshot_kind: text
{
"sourceRange": [
95,
107
107,
0
]
}
]
@ -110,7 +114,8 @@ snapshot_kind: text
{
"sourceRange": [
194,
206
206,
0
]
}
]
@ -128,7 +133,8 @@ snapshot_kind: text
{
"sourceRange": [
341,
373
373,
0
]
}
]
@ -141,7 +147,8 @@ snapshot_kind: text
{
"sourceRange": [
88,
89
89,
0
]
}
]

View File

@ -51,7 +51,8 @@ snapshot_kind: text
{
"sourceRange": [
5,
19
19,
0
]
}
]

View File

@ -751,7 +751,8 @@ snapshot_kind: text
{
"sourceRange": [
10,
316
316,
0
]
}
]
@ -766,7 +767,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
206,
219
219,
0
],
"tag": null,
"type": "extrudePlane"
@ -776,7 +778,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
225,
238
238,
0
],
"tag": null,
"type": "extrudePlane"
@ -786,7 +789,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
244,
257
257,
0
],
"tag": null,
"type": "extrudePlane"
@ -796,7 +800,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
263,
276
276,
0
],
"tag": null,
"type": "extrudePlane"
@ -811,7 +816,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
206,
219
219,
0
]
},
"from": [
@ -830,7 +836,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
225,
238
238,
0
]
},
"from": [
@ -849,7 +856,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
244,
257
257,
0
]
},
"from": [
@ -868,7 +876,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
263,
276
276,
0
]
},
"from": [
@ -887,7 +896,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
282,
290
290,
0
]
},
"from": [
@ -942,7 +952,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
183,
200
200,
0
]
}
},
@ -950,7 +961,8 @@ snapshot_kind: text
{
"sourceRange": [
183,
200
200,
0
]
}
]
@ -962,7 +974,8 @@ snapshot_kind: text
{
"sourceRange": [
183,
200
200,
0
]
}
]

View File

@ -1,5 +1,5 @@
---
source: kcl/src/tests.rs
source: kcl/src/simulation_tests.rs
description: Result of tokenizing cube.kcl
snapshot_kind: text
---

View File

@ -123,7 +123,8 @@ snapshot_kind: text
{
"sourceRange": [
15,
40
40,
0
]
}
]
@ -140,7 +141,8 @@ snapshot_kind: text
{
"sourceRange": [
47,
53
53,
0
]
}
]
@ -157,7 +159,8 @@ snapshot_kind: text
{
"sourceRange": [
90,
107
107,
0
]
}
]

View File

@ -41,7 +41,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
35,
76
76,
0
],
"tag": null,
"type": "extrudeArc"
@ -56,7 +57,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
35,
76
76,
0
]
},
"ccw": true,
@ -117,7 +119,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
35,
76
76,
0
]
}
},
@ -125,7 +128,8 @@ snapshot_kind: text
{
"sourceRange": [
35,
76
76,
0
]
}
]
@ -137,7 +141,8 @@ snapshot_kind: text
{
"sourceRange": [
35,
76
76,
0
]
}
]

View File

@ -39,7 +39,8 @@ snapshot_kind: text
{
"sourceRange": [
64,
65
65,
0
]
}
]
@ -52,7 +53,8 @@ snapshot_kind: text
{
"sourceRange": [
199,
200
200,
0
]
}
]
@ -65,7 +67,8 @@ snapshot_kind: text
{
"sourceRange": [
332,
333
333,
0
]
}
]

View File

@ -43,7 +43,8 @@ snapshot_kind: text
{
"sourceRange": [
43,
55
55,
0
]
}
]
@ -56,7 +57,8 @@ snapshot_kind: text
{
"sourceRange": [
256,
266
266,
0
]
}
]
@ -69,7 +71,8 @@ snapshot_kind: text
{
"sourceRange": [
93,
101
101,
0
]
}
]
@ -82,7 +85,8 @@ snapshot_kind: text
{
"sourceRange": [
277,
285
285,
0
]
}
]

View File

@ -42,7 +42,8 @@ snapshot_kind: text
{
"sourceRange": [
56,
74
74,
0
]
}
]
@ -60,7 +61,8 @@ snapshot_kind: text
{
"sourceRange": [
529,
543
543,
0
]
}
]
@ -73,7 +75,8 @@ snapshot_kind: text
{
"sourceRange": [
122,
132
132,
0
]
}
]
@ -86,7 +89,8 @@ snapshot_kind: text
{
"sourceRange": [
356,
362
362,
0
]
}
]
@ -99,7 +103,8 @@ snapshot_kind: text
{
"sourceRange": [
553,
570
570,
0
]
}
]
@ -112,7 +117,8 @@ snapshot_kind: text
{
"sourceRange": [
757,
770
770,
0
]
}
]
@ -125,7 +131,8 @@ snapshot_kind: text
{
"sourceRange": [
342,
347
347,
0
]
}
]

View File

@ -329,7 +329,8 @@ snapshot_kind: text
{
"sourceRange": [
10,
157
157,
0
]
}
]
@ -957,7 +958,8 @@ snapshot_kind: text
{
"sourceRange": [
10,
157
157,
0
]
}
]
@ -973,7 +975,8 @@ snapshot_kind: text
{
"sourceRange": [
170,
369
369,
0
]
}
]
@ -986,7 +989,8 @@ snapshot_kind: text
{
"sourceRange": [
52,
77
77,
0
]
}
],
@ -1023,7 +1027,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
83,
98
98,
0
]
},
"from": [
@ -1042,7 +1047,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
104,
119
119,
0
]
},
"from": [
@ -1061,7 +1067,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
125,
141
141,
0
]
},
"from": [
@ -1080,7 +1087,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
147,
155
155,
0
]
},
"from": [
@ -1100,7 +1108,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
52,
77
77,
0
]
},
"from": [
@ -1119,7 +1128,8 @@ snapshot_kind: text
{
"sourceRange": [
52,
77
77,
0
]
}
]
@ -1134,7 +1144,8 @@ snapshot_kind: text
{
"sourceRange": [
242,
267
267,
0
]
}
],
@ -1171,7 +1182,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
277,
292
292,
0
]
},
"from": [
@ -1190,7 +1202,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
302,
317
317,
0
]
},
"from": [
@ -1209,7 +1222,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
327,
343
343,
0
]
},
"from": [
@ -1228,7 +1242,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
353,
361
361,
0
]
},
"from": [
@ -1248,7 +1263,8 @@ snapshot_kind: text
"id": "[uuid]",
"sourceRange": [
242,
267
267,
0
]
},
"from": [
@ -1269,7 +1285,8 @@ snapshot_kind: text
{
"sourceRange": [
187,
367
367,
0
]
}
]

View File

@ -8,7 +8,7 @@ use std::{
use futures::stream::TryStreamExt;
use gloo_utils::format::JsValueSerdeExt;
use kcl_lib::{
ast::types::{Node, Program},
ast::types::{ModuleId, Node, Program},
coredump::CoreDump,
engine::EngineManager,
executor::ExecutorSettings,
@ -153,9 +153,11 @@ pub async fn modify_ast_for_sketch_wasm(
.map_err(|e| format!("{:?}", e))?,
));
let module_id = ModuleId::default();
let _ = kcl_lib::ast::modify::modify_ast_for_sketch(
&engine,
&mut program,
module_id,
sketch_name,
plane,
uuid::Uuid::parse_str(sketch_id).map_err(|e| e.to_string())?,
@ -193,7 +195,8 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
console_error_panic_hook::set_once();
let tokens = kcl_lib::token::lexer(js).map_err(JsError::from)?;
let module_id = ModuleId::default();
let tokens = kcl_lib::token::lexer(js, module_id).map_err(JsError::from)?;
Ok(JsValue::from_serde(&tokens)?)
}
@ -201,7 +204,8 @@ pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
pub fn parse_wasm(js: &str) -> Result<JsValue, String> {
console_error_panic_hook::set_once();
let tokens = kcl_lib::token::lexer(js).map_err(String::from)?;
let module_id = ModuleId::default();
let tokens = kcl_lib::token::lexer(js, module_id).map_err(String::from)?;
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast().map_err(String::from)?;
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the

View File

@ -28,7 +28,7 @@ async fn kcl_test_fillet_duplicate_tags() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([203, 249])], message: "Duplicate tags are not allowed." }"#,
r#"type: KclErrorDetails { source_ranges: [SourceRange([203, 249, 0])], message: "Duplicate tags are not allowed." }"#,
);
}
@ -83,7 +83,7 @@ async fn kcl_test_execute_engine_error_return() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"engine: KclErrorDetails { source_ranges: [SourceRange([216, 229])], message: "Modeling command failed: [ApiError { error_code: BadRequest, message: \"The path is not closed. Solid2D construction requires a closed path!\" }]" }"#,
r#"engine: KclErrorDetails { source_ranges: [SourceRange([216, 229, 0])], message: "Modeling command failed: [ApiError { error_code: BadRequest, message: \"The path is not closed. Solid2D construction requires a closed path!\" }]" }"#,
);
}
@ -515,7 +515,7 @@ async fn kcl_test_import_file_doesnt_exist() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 27])], message: "File `thing.obj` does not exist." }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 27, 0])], message: "File `thing.obj` does not exist." }"#
);
}
@ -583,7 +583,7 @@ async fn kcl_test_import_ext_doesnt_match() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 76])], message: "The given format does not match the file extension. Expected: `gltf`, Given: `obj`" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 76, 0])], message: "The given format does not match the file extension. Expected: `gltf`, Given: `obj`" }"#
);
}
@ -742,7 +742,7 @@ part002 = startSketchOn(part001, part001.sketch.tags.here)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([88, 133]), SourceRange([210, 226])], message: "could not sketch tangential arc, because its center would be infinitely far away in the X direction" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([88, 133, 0]), SourceRange([210, 226, 0])], message: "could not sketch tangential arc, because its center would be infinitely far away in the X direction" }"#
);
}
@ -799,7 +799,7 @@ async fn kcl_test_stdlib_kcl_error_right_code_path() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([151, 189])], message: "Expected an argument at index 1" }"#,
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([151, 189, 0])], message: "Expected an argument at index 1" }"#,
);
}
@ -869,7 +869,7 @@ part = rectShape([0, 0], 20, 20)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([863, 912])], message: "Argument at index 0 was supposed to be type kcl_lib::std::shapes::CircleData but found string (text)" }"#,
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([863, 912, 0])], message: "Argument at index 0 was supposed to be type kcl_lib::std::shapes::CircleData but found string (text)" }"#,
);
}
@ -954,7 +954,7 @@ async fn kcl_test_revolve_bad_angle_low() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 308])], message: "Expected angle to be between -360 and 360 and not 0, found `-455`" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 308, 0])], message: "Expected angle to be between -360 and 360 and not 0, found `-455`" }"#
);
}
@ -979,7 +979,7 @@ async fn kcl_test_revolve_bad_angle_high() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 307])], message: "Expected angle to be between -360 and 360 and not 0, found `455`" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 307, 0])], message: "Expected angle to be between -360 and 360 and not 0, found `455`" }"#
);
}
@ -1073,7 +1073,7 @@ sketch001 = startSketchOn(box, revolveAxis)
//this fails right now, but slightly differently, lets just say its enough for it to fail - mike
//assert_eq!(
// result.err().unwrap().to_string(),
// r#"engine: KclErrorDetails { source_ranges: [SourceRange([346, 390])], message: "Modeling command failed: [ApiError { error_code: InternalEngine, message: \"Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis\" }]" }"#
// r#"engine: KclErrorDetails { source_ranges: [SourceRange([346, 390, 0])], message: "Modeling command failed: [ApiError { error_code: InternalEngine, message: \"Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis\" }]" }"#
//);
}
@ -1354,7 +1354,7 @@ secondSketch = startSketchOn(part001, '')
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([260, 286])], message: "Argument at index 1 was supposed to be type kcl_lib::std::sketch::FaceTag but found string (text)" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([260, 286, 0])], message: "Argument at index 1 was supposed to be type kcl_lib::std::sketch::FaceTag but found string (text)" }"#
);
}
@ -1385,7 +1385,7 @@ extrusion = startSketchOn('XY')
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([68, 334]), SourceRange([428, 461])], message: "Expected 2 arguments, got 3" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([68, 334, 0]), SourceRange([428, 461, 0])], message: "Expected 2 arguments, got 3" }"#
);
}
@ -1681,7 +1681,7 @@ part001 = cube([0,0], 20)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([259, 345])], message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag." }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([259, 345, 0])], message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag." }"#
);
}
@ -1708,7 +1708,7 @@ let p = triangle(200)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"value already defined: KclErrorDetails { source_ranges: [SourceRange([311, 313]), SourceRange([326, 339])], message: "Cannot redefine `a`" }"#
r#"value already defined: KclErrorDetails { source_ranges: [SourceRange([311, 313, 0]), SourceRange([326, 339, 0])], message: "Cannot redefine `a`" }"#
);
}
@ -1783,7 +1783,7 @@ async fn kcl_test_arc_error_same_start_end() {
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([57, 140])], message: "Arc start and end angles must be different" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([57, 140, 0])], message: "Arc start and end angles must be different" }"#
);
}
@ -1803,7 +1803,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 111])], message: "Cannot have an x constrained angle of 90 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 111, 0])], message: "Cannot have an x constrained angle of 90 degrees" }"#
);
}
@ -1823,7 +1823,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112])], message: "Cannot have an x constrained angle of 270 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112, 0])], message: "Cannot have an x constrained angle of 270 degrees" }"#
);
}
@ -1843,7 +1843,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 110])], message: "Cannot have a y constrained angle of 0 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 110, 0])], message: "Cannot have a y constrained angle of 0 degrees" }"#
);
}
@ -1863,7 +1863,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112])], message: "Cannot have a y constrained angle of 180 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112, 0])], message: "Cannot have a y constrained angle of 180 degrees" }"#
);
}
@ -1883,7 +1883,7 @@ extrusion = extrude(10, sketch001)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125])], message: "Cannot have an x constrained angle of 90 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125, 0])], message: "Cannot have an x constrained angle of 90 degrees" }"#
);
}
@ -1903,7 +1903,7 @@ extrusion = extrude(10, sketch001)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125])], message: "Cannot have an x constrained angle of 90 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125, 0])], message: "Cannot have an x constrained angle of 90 degrees" }"#
);
}
@ -1925,7 +1925,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 142])], message: "Cannot have a y constrained angle of 0 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 142, 0])], message: "Cannot have a y constrained angle of 0 degrees" }"#
);
}
@ -1947,7 +1947,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 144])], message: "Cannot have a y constrained angle of 180 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 144, 0])], message: "Cannot have a y constrained angle of 180 degrees" }"#
);
}
@ -1969,7 +1969,7 @@ example = extrude(10, exampleSketch)
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 145])], message: "Cannot have a y constrained angle of 180 degrees" }"#
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 145, 0])], message: "Cannot have a y constrained angle of 180 degrees" }"#
);
}
@ -1986,7 +1986,7 @@ someFunction('INVALID')
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([37, 61]), SourceRange([65, 88])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([37, 61, 0]), SourceRange([65, 88, 0])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
);
}
@ -2007,7 +2007,7 @@ someFunction('INVALID')
assert!(result.is_err());
assert_eq!(
result.err().unwrap().to_string(),
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([89, 114]), SourceRange([126, 155]), SourceRange([159, 182])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([89, 114, 0]), SourceRange([126, 155, 0]), SourceRange([159, 182, 0])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
);
}

View File

@ -1,5 +1,5 @@
use kcl_lib::{
ast::types::{Node, Program},
ast::types::{ModuleId, Node, Program},
errors::KclError,
executor::{ExecutorContext, IdGenerator},
parser,
@ -28,7 +28,8 @@ macro_rules! gen_test_parse_fail {
}
async fn setup(program: &str) -> (ExecutorContext, Node<Program>, IdGenerator) {
let tokens = kcl_lib::token::lexer(program).unwrap();
let module_id = ModuleId::default();
let tokens = kcl_lib::token::lexer(program, module_id).unwrap();
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let ctx = kcl_lib::executor::ExecutorContext {
@ -60,7 +61,7 @@ async fn run_fail(code: &str) -> KclError {
}
async fn run_parse_fail(code: &str) -> KclError {
let Err(e) = parser::parse(code) else {
let Err(e) = parser::top_level_parse(code) else {
panic!("Expected this KCL program to fail to parse, but it (incorrectly) never threw an error.");
};
e

View File

@ -2,7 +2,7 @@ use anyhow::Result;
use kcl_lib::{
ast::{
modify::modify_ast_for_sketch,
types::{Node, Program},
types::{ModuleId, Node, Program},
},
executor::{ExecutorContext, IdGenerator, KclValue, PlaneType, Sketch, SourceRange},
};
@ -10,10 +10,9 @@ use kittycad_modeling_cmds::{each_cmd as mcmd, length_unit::LengthUnit, shared::
use pretty_assertions::assert_eq;
/// Setup the engine and parse code for an ast.
async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Node<Program>, uuid::Uuid)> {
let tokens = kcl_lib::token::lexer(code)?;
let parser = kcl_lib::parser::Parser::new(tokens);
let program = parser.ast()?;
async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Node<Program>, ModuleId, uuid::Uuid)> {
let module_id = ModuleId::default();
let program = kcl_lib::parser::parse(code, module_id)?;
let ctx = kcl_lib::executor::ExecutorContext::new_with_default_client(Default::default()).await?;
let exec_state = ctx.run(&program, None, IdGenerator::default(), None).await?;
@ -60,7 +59,7 @@ async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Node<Program>
)
.await?;
Ok((ctx, program, sketch_id))
Ok((ctx, program, module_id, sketch_id))
}
#[tokio::test(flavor = "multi_thread")]
@ -76,9 +75,9 @@ async fn kcl_test_modify_sketch_part001() {
name
);
let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
let mut new_program = program.clone();
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
.await
.unwrap();
@ -101,9 +100,9 @@ async fn kcl_test_modify_sketch_part002() {
name
);
let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
let mut new_program = program.clone();
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
.await
.unwrap();
@ -128,9 +127,9 @@ async fn kcl_test_modify_close_sketch() {
name
);
let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
let mut new_program = program.clone();
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
.await
.unwrap();
@ -154,9 +153,9 @@ async fn kcl_test_modify_line_to_close_sketch() {
name
);
let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
let mut new_program = program.clone();
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
.await
.unwrap();
@ -191,14 +190,14 @@ const {} = startSketchOn("XY")
name
);
let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
let mut new_program = program.clone();
let result = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id).await;
let result = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id).await;
assert!(result.is_err());
assert_eq!(
result.unwrap_err().to_string(),
r#"engine: KclErrorDetails { source_ranges: [SourceRange([188, 193])], message: "Sketch part002 is constrained `partial` and cannot be modified" }"#
r#"engine: KclErrorDetails { source_ranges: [SourceRange([188, 193, 0])], message: "Sketch part002 is constrained `partial` and cannot be modified" }"#
);
}
@ -216,9 +215,9 @@ async fn kcl_test_modify_line_should_close_sketch() {
name
);
let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
let mut new_program = program.clone();
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
.await
.unwrap();