Fix KCL source ranges to know which source file they point to (#4418)
* Add ts_rs feature to work with indexmap
* Add feature for schemars to work with indexmap
* Add module ID to intern module paths
* Update code to use new source range with three fields
* Update generated files
* Update docs
* Fix wasm
* Fix TS code to use new SourceRange
* Fix TS tests to use new SourceRange and moduleId
* Fix formatting
* Fix to filter errors and source ranges to only show the top-level module
* Fix to reuse module IDs
* Fix to disallow empty path for import
* Revert unneeded Self change
* Rename field to be clearer
* Fix parser tests
* Update snapshots
* Change to not serialize module_id of 0
* Update snapshots after adding default module_id
* Move module_id functions to separate module
* Fix tests for console errors
* Proposal: module ID = 0 gets skipped when serializing tokens too (#4422)

  Just like in AST nodes. Also I think "is_top_level" communicates intention better than is_default.

---------

Co-authored-by: Adam Chalmers <adam.chalmers@zoo.dev>
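For review context: the heart of the change is that a KCL source range grows from two elements to three. A minimal TypeScript sketch of the new shape, assuming the TS side mirrors the Rust type (the isTopLevel helper is illustrative only, not part of this PR):

// Before, a range was [start, end] and implicitly meant "the current file".
// Now a third element identifies which module (source file) the offsets
// index into.
type SourceRange = [start: number, end: number, moduleId: number]

// Module ID 0 is reserved for the top-level module, i.e. the file being
// executed directly; imported modules get interned IDs greater than 0.
const TOP_LEVEL_MODULE_ID = 0

// Hypothetical helper: ranges coming from imported files can now be told
// apart from ranges in the top-level file.
const isTopLevel = ([, , moduleId]: SourceRange): boolean =>
  moduleId === TOP_LEVEL_MODULE_ID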
(File diff suppressed because it is too large)
@@ -18,7 +18,7 @@ export const isErrorWhitelisted = (exception: Error) => {
   {
     name: '"{"kind"',
     message:
-      '"engine","sourceRanges":[[0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}"',
+      '"engine","sourceRanges":[[0,0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}"',
     stack: '',
     foundInSpec: 'e2e/playwright/testing-settings.spec.ts',
     project: 'Google Chrome',
@@ -156,8 +156,8 @@ export const isErrorWhitelisted = (exception: Error) => {
   {
     name: 'Unhandled Promise Rejection',
     message:
-      '{"kind":"engine","sourceRanges":[[0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}',
-    stack: `Unhandled Promise Rejection: {"kind":"engine","sourceRanges":[[0,0]],"msg":"Failed to get string from response from engine: \`JsValue(undefined)\`"}
+      '{"kind":"engine","sourceRanges":[[0,0,0]],"msg":"Failed to get string from response from engine: `JsValue(undefined)`"}',
+    stack: `Unhandled Promise Rejection: {"kind":"engine","sourceRanges":[[0,0,0]],"msg":"Failed to get string from response from engine: \`JsValue(undefined)\`"}
     at unknown (http://localhost:3000/src/lang/std/engineConnection.ts:1245:26)`,
     foundInSpec:
       'e2e/playwright/onboarding-tests.spec.ts Click through each onboarding step',
@@ -253,7 +253,7 @@ export const isErrorWhitelisted = (exception: Error) => {
   {
     name: '{"kind"',
     stack: ``,
-    message: `engine","sourceRanges":[[0,0]],"msg":"Failed to wait for promise from engine: JsValue(\\"Force interrupt, executionIsStale, new AST requested\\")"}`,
+    message: `engine","sourceRanges":[[0,0,0]],"msg":"Failed to wait for promise from engine: JsValue(\\"Force interrupt, executionIsStale, new AST requested\\")"}`,
     project: 'Google Chrome',
     foundInSpec: 'e2e/playwright/testing-settings.spec.ts',
   },
@@ -43,14 +43,14 @@ describe('processMemory', () => {
         tag: null,
         id: expect.any(String),
         faceId: expect.any(String),
-        sourceRange: [170, 194],
+        sourceRange: [170, 194, 0],
       },
       {
         type: 'extrudePlane',
         tag: null,
         id: expect.any(String),
         faceId: expect.any(String),
-        sourceRange: [202, 230],
+        sourceRange: [202, 230, 0],
       },
     ],
     theSketch: [
@@ -38,6 +38,7 @@ export class KclManager {
     body: [],
     start: 0,
     end: 0,
+    moduleId: 0,
     nonCodeMeta: {
       nonCodeNodes: {},
       startNodes: [],
@@ -204,6 +205,7 @@ export class KclManager {
     body: [],
     start: 0,
     end: 0,
+    moduleId: 0,
     nonCodeMeta: {
       nonCodeNodes: {},
       startNodes: [],
@@ -1903,6 +1903,6 @@ describe('parsing errors', () => {
     const error = result as KCLError
     expect(error.kind).toBe('syntax')
     expect(error.msg).toBe('Unexpected token: (')
-    expect(error.sourceRanges).toEqual([[27, 28]])
+    expect(error.sourceRanges).toEqual([[27, 28, 0]])
   })
 })
@@ -19,7 +19,7 @@ const mySketch001 = startSketchOn('XY')
     const sketch001 = execState.memory.get('mySketch001')
     expect(sketch001).toEqual({
       type: 'UserVal',
-      __meta: [{ sourceRange: [46, 71] }],
+      __meta: [{ sourceRange: [46, 71, 0] }],
       value: {
         type: 'Sketch',
         on: expect.any(Object),
@@ -29,7 +29,7 @@ const mySketch001 = startSketchOn('XY')
           tag: null,
           __geoMeta: {
             id: expect.any(String),
-            sourceRange: [46, 71],
+            sourceRange: [46, 71, 0],
           },
         },
         paths: [
@@ -39,7 +39,7 @@ const mySketch001 = startSketchOn('XY')
             to: [-1.59, -1.54],
             from: [0, 0],
             __geoMeta: {
-              sourceRange: [77, 102],
+              sourceRange: [77, 102, 0],
               id: expect.any(String),
             },
           },
@@ -49,13 +49,13 @@ const mySketch001 = startSketchOn('XY')
             from: [-1.59, -1.54],
             tag: null,
             __geoMeta: {
-              sourceRange: [108, 132],
+              sourceRange: [108, 132, 0],
               id: expect.any(String),
             },
           },
         ],
         id: expect.any(String),
-        __meta: [{ sourceRange: [46, 71] }],
+        __meta: [{ sourceRange: [46, 71, 0] }],
       },
     })
  })
@@ -80,14 +80,14 @@ const mySketch001 = startSketchOn('XY')
        faceId: expect.any(String),
        tag: null,
        id: expect.any(String),
-        sourceRange: [77, 102],
+        sourceRange: [77, 102, 0],
      },
      {
        type: 'extrudePlane',
        faceId: expect.any(String),
        tag: null,
        id: expect.any(String),
-        sourceRange: [108, 132],
+        sourceRange: [108, 132, 0],
      },
    ],
    sketch: {
@@ -104,7 +104,7 @@ const mySketch001 = startSketchOn('XY')
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [77, 102],
+            sourceRange: [77, 102, 0],
          },
        },
        {
@@ -114,7 +114,7 @@ const mySketch001 = startSketchOn('XY')
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [108, 132],
+            sourceRange: [108, 132, 0],
          },
        },
      ],
@@ -122,7 +122,7 @@ const mySketch001 = startSketchOn('XY')
      height: 2,
      startCapId: expect.any(String),
      endCapId: expect.any(String),
-      __meta: [{ sourceRange: [46, 71] }],
+      __meta: [{ sourceRange: [46, 71, 0] }],
    })
  })
  test('sketch extrude and sketch on one of the faces', async () => {
@@ -162,7 +162,7 @@ const sk2 = startSketchOn('XY')
        faceId: expect.any(String),
        tag: null,
        id: expect.any(String),
-        sourceRange: [69, 89],
+        sourceRange: [69, 89, 0],
      },
      {
        type: 'extrudePlane',
@@ -174,14 +174,14 @@ const sk2 = startSketchOn('XY')
          value: 'p',
        },
        id: expect.any(String),
-        sourceRange: [95, 117],
+        sourceRange: [95, 117, 0],
      },
      {
        type: 'extrudePlane',
        faceId: expect.any(String),
        tag: null,
        id: expect.any(String),
-        sourceRange: [123, 142],
+        sourceRange: [123, 142, 0],
      },
    ],
    sketch: {
@@ -194,7 +194,7 @@ const sk2 = startSketchOn('XY')
      p: {
        __meta: [
          {
-            sourceRange: [114, 116],
+            sourceRange: [114, 116, 0],
          },
        ],
        type: 'TagIdentifier',
@@ -210,7 +210,7 @@ const sk2 = startSketchOn('XY')
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [69, 89],
+            sourceRange: [69, 89, 0],
          },
        },
        {
@@ -225,7 +225,7 @@ const sk2 = startSketchOn('XY')
          },
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [95, 117],
+            sourceRange: [95, 117, 0],
          },
        },
        {
@@ -235,7 +235,7 @@ const sk2 = startSketchOn('XY')
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [123, 142],
+            sourceRange: [123, 142, 0],
          },
        },
      ],
@@ -243,7 +243,7 @@ const sk2 = startSketchOn('XY')
      height: 2,
      startCapId: expect.any(String),
      endCapId: expect.any(String),
-      __meta: [{ sourceRange: [38, 63] }],
+      __meta: [{ sourceRange: [38, 63, 0] }],
    },
    {
      type: 'Solid',
@@ -254,7 +254,7 @@ const sk2 = startSketchOn('XY')
      faceId: expect.any(String),
      tag: null,
      id: expect.any(String),
-      sourceRange: [373, 393],
+      sourceRange: [373, 393, 0],
    },
    {
      type: 'extrudePlane',
@@ -266,14 +266,14 @@ const sk2 = startSketchOn('XY')
        value: 'o',
      },
      id: expect.any(String),
-      sourceRange: [399, 420],
+      sourceRange: [399, 420, 0],
    },
    {
      type: 'extrudePlane',
      faceId: expect.any(String),
      tag: null,
      id: expect.any(String),
-      sourceRange: [426, 445],
+      sourceRange: [426, 445, 0],
    },
    ],
    sketch: {
@@ -286,7 +286,7 @@ const sk2 = startSketchOn('XY')
      o: {
        __meta: [
          {
-            sourceRange: [417, 419],
+            sourceRange: [417, 419, 0],
          },
        ],
        type: 'TagIdentifier',
@@ -302,7 +302,7 @@ const sk2 = startSketchOn('XY')
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [373, 393],
+            sourceRange: [373, 393, 0],
          },
        },
        {
@@ -317,7 +317,7 @@ const sk2 = startSketchOn('XY')
          },
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [399, 420],
+            sourceRange: [399, 420, 0],
          },
        },
        {
@@ -327,7 +327,7 @@ const sk2 = startSketchOn('XY')
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [426, 445],
+            sourceRange: [426, 445, 0],
          },
        },
      ],
@@ -335,7 +335,7 @@ const sk2 = startSketchOn('XY')
      height: 2,
      startCapId: expect.any(String),
      endCapId: expect.any(String),
-      __meta: [{ sourceRange: [342, 367] }],
+      __meta: [{ sourceRange: [342, 367, 0] }],
    },
  ])
})
@@ -9,8 +9,8 @@ describe('test kclErrToDiagnostic', () => {
      kind: 'semantic',
      msg: 'Semantic error',
      sourceRanges: [
-        [0, 1],
-        [2, 3],
+        [0, 1, 0],
+        [2, 3, 0],
      ],
    },
    {
@@ -19,8 +19,8 @@ describe('test kclErrToDiagnostic', () => {
      kind: 'type',
      msg: 'Type error',
      sourceRanges: [
-        [4, 5],
-        [6, 7],
+        [4, 5, 0],
+        [6, 7, 0],
      ],
    },
  ]
@@ -4,15 +4,17 @@ import { posToOffset } from '@kittycad/codemirror-lsp-client'
 import { Diagnostic as LspDiagnostic } from 'vscode-languageserver-protocol'
 import { Text } from '@codemirror/state'
 
+const TOP_LEVEL_MODULE_ID = 0
+
 type ExtractKind<T> = T extends { kind: infer K } ? K : never
 export class KCLError extends Error {
   kind: ExtractKind<RustKclError> | 'name'
-  sourceRanges: [number, number][]
+  sourceRanges: [number, number, number][]
   msg: string
   constructor(
     kind: ExtractKind<RustKclError> | 'name',
     msg: string,
-    sourceRanges: [number, number][]
+    sourceRanges: [number, number, number][]
   ) {
     super()
     this.kind = kind
@@ -23,63 +25,63 @@ export class KCLError extends Error {
   }
 }
 
 export class KCLLexicalError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('lexical', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLSyntaxError.prototype)
   }
 }
 
 export class KCLInternalError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('internal', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLSyntaxError.prototype)
   }
 }
 
 export class KCLSyntaxError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('syntax', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLSyntaxError.prototype)
   }
 }
 
 export class KCLSemanticError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('semantic', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLSemanticError.prototype)
   }
 }
 
 export class KCLTypeError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('type', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLTypeError.prototype)
   }
 }
 
 export class KCLUnimplementedError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('unimplemented', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLUnimplementedError.prototype)
   }
 }
 
 export class KCLUnexpectedError extends KCLError {
-  constructor(msg: string, sourceRanges: [number, number][]) {
+  constructor(msg: string, sourceRanges: [number, number, number][]) {
     super('unexpected', msg, sourceRanges)
     Object.setPrototypeOf(this, KCLUnexpectedError.prototype)
   }
 }
 
 export class KCLValueAlreadyDefined extends KCLError {
-  constructor(key: string, sourceRanges: [number, number][]) {
+  constructor(key: string, sourceRanges: [number, number, number][]) {
     super('name', `Key ${key} was already defined elsewhere`, sourceRanges)
     Object.setPrototypeOf(this, KCLValueAlreadyDefined.prototype)
   }
 }
 
 export class KCLUndefinedValueError extends KCLError {
-  constructor(key: string, sourceRanges: [number, number][]) {
+  constructor(key: string, sourceRanges: [number, number, number][]) {
     super('name', `Key ${key} has not been defined`, sourceRanges)
     Object.setPrototypeOf(this, KCLUndefinedValueError.prototype)
   }
 }
@@ -97,13 +99,22 @@ export function lspDiagnosticsToKclErrors(
     .flatMap(
       ({ range, message }) =>
         new KCLError('unexpected', message, [
-          [posToOffset(doc, range.start)!, posToOffset(doc, range.end)!],
+          [
+            posToOffset(doc, range.start)!,
+            posToOffset(doc, range.end)!,
+            TOP_LEVEL_MODULE_ID,
+          ],
         ])
     )
     .filter(({ sourceRanges }) => {
-      const [from, to] = sourceRanges[0]
+      const [from, to, moduleId] = sourceRanges[0]
       return (
-        from !== null && to !== null && from !== undefined && to !== undefined
+        from !== null &&
+        to !== null &&
+        from !== undefined &&
+        to !== undefined &&
+        // Filter out errors that are not from the top-level module.
+        moduleId === TOP_LEVEL_MODULE_ID
       )
     })
     .sort((a, b) => {
@@ -127,8 +138,16 @@ export function kclErrorsToDiagnostics(
   errors: KCLError[]
 ): CodeMirrorDiagnostic[] {
   return errors?.flatMap((err) => {
-    return err.sourceRanges.map(([from, to]) => {
-      return { from, to, message: err.msg, severity: 'error' }
-    })
+    const sourceRanges: CodeMirrorDiagnostic[] = err.sourceRanges
+      // Filter out errors that are not from the top-level module.
+      .filter(([_start, _end, moduleId]) => moduleId === TOP_LEVEL_MODULE_ID)
+      .map(([from, to]) => {
+        return { from, to, message: err.msg, severity: 'error' }
+      })
+    // Make sure we didn't filter out all the source ranges.
+    if (sourceRanges.length === 0) {
+      sourceRanges.push({ from: 0, to: 0, message: err.msg, severity: 'error' })
+    }
+    return sourceRanges
   })
 }
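A usage sketch of the filtering above (the error value is made up for illustration; KCLError and kclErrorsToDiagnostics are the real names from this diff):

// Only the range with moduleId 0 survives, so CodeMirror never underlines
// offsets that actually belong to an imported file's text.
const err = new KCLError('semantic', 'bad argument', [
  [10, 20, 0], // range in the top-level file: kept
  [5, 9, 2], // range in imported module 2: filtered out
])
const diagnostics = kclErrorsToDiagnostics([err])
// -> [{ from: 10, to: 20, message: 'bad argument', severity: 'error' }]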
@@ -65,7 +65,7 @@ const newVar = myVar + 1`
          to: [0, 2],
          from: [0, 0],
          __geoMeta: {
-            sourceRange: [72, 97],
+            sourceRange: [72, 97, 0],
            id: expect.any(String),
          },
          tag: {
@@ -81,7 +81,7 @@ const newVar = myVar + 1`
          from: [0, 2],
          tag: null,
          __geoMeta: {
-            sourceRange: [103, 119],
+            sourceRange: [103, 119, 0],
            id: expect.any(String),
          },
        },
@@ -90,7 +90,7 @@ const newVar = myVar + 1`
          to: [5, -1],
          from: [2, 3],
          __geoMeta: {
-            sourceRange: [125, 154],
+            sourceRange: [125, 154, 0],
            id: expect.any(String),
          },
          tag: {
@@ -160,14 +160,14 @@ const newVar = myVar + 1`
          tag: null,
          __geoMeta: {
            id: expect.any(String),
-            sourceRange: [39, 63],
+            sourceRange: [39, 63, 0],
          },
        },
        tags: {
          myPath: {
            __meta: [
              {
-                sourceRange: [109, 116],
+                sourceRange: [109, 116, 0],
              },
            ],
            type: 'TagIdentifier',
@@ -182,7 +182,7 @@ const newVar = myVar + 1`
          from: [0, 0],
          tag: null,
          __geoMeta: {
-            sourceRange: [69, 85],
+            sourceRange: [69, 85, 0],
            id: expect.any(String),
          },
        },
@@ -191,7 +191,7 @@ const newVar = myVar + 1`
          to: [0, 1],
          from: [1, 1],
          __geoMeta: {
-            sourceRange: [91, 117],
+            sourceRange: [91, 117, 0],
            id: expect.any(String),
          },
          tag: {
@@ -207,15 +207,15 @@ const newVar = myVar + 1`
          from: [0, 1],
          tag: null,
          __geoMeta: {
-            sourceRange: [123, 139],
+            sourceRange: [123, 139, 0],
            id: expect.any(String),
          },
        },
      ],
      id: expect.any(String),
-      __meta: [{ sourceRange: [39, 63] }],
+      __meta: [{ sourceRange: [39, 63, 0] }],
    },
-    __meta: [{ sourceRange: [39, 63] }],
+    __meta: [{ sourceRange: [39, 63, 0] }],
  })
 })
 it('execute array expression', async () => {
@@ -229,7 +229,7 @@ const newVar = myVar + 1`
      value: 3,
      __meta: [
        {
-          sourceRange: [14, 15],
+          sourceRange: [14, 15, 0],
        },
      ],
    })
@@ -238,7 +238,7 @@ const newVar = myVar + 1`
      value: [1, '2', 3, 9],
      __meta: [
        {
-          sourceRange: [27, 49],
+          sourceRange: [27, 49, 0],
        },
      ],
    })
@@ -257,7 +257,7 @@ const newVar = myVar + 1`
      value: { aStr: 'str', anum: 2, identifier: 3, binExp: 9 },
      __meta: [
        {
-          sourceRange: [27, 83],
+          sourceRange: [27, 83, 0],
        },
      ],
    })
@@ -272,7 +272,7 @@ const newVar = myVar + 1`
      value: '123',
      __meta: [
        {
-          sourceRange: [41, 50],
+          sourceRange: [41, 50, 0],
        },
      ],
    })
@@ -426,7 +426,7 @@ const theExtrude = startSketchOn('XY')
      new KCLError(
        'undefined_value',
        'memory item key `myVarZ` is not defined',
-        [[129, 135]]
+        [[129, 135, 0]]
      )
    )
  })
@@ -101,15 +101,15 @@ describe('Testing findUniqueName', () => {
   it('should find a unique name', () => {
     const result = findUniqueName(
       JSON.stringify([
-        { type: 'Identifier', name: 'yo01', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo02', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo03', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo04', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo05', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo06', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo07', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo08', start: 0, end: 0 },
-        { type: 'Identifier', name: 'yo09', start: 0, end: 0 },
+        { type: 'Identifier', name: 'yo01', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo02', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo03', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo04', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo05', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo06', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo07', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo08', start: 0, end: 0, moduleId: 0 },
+        { type: 'Identifier', name: 'yo09', start: 0, end: 0, moduleId: 0 },
       ] satisfies Node<Identifier>[]),
       'yo',
       2
@@ -124,6 +124,7 @@ describe('Testing addSketchTo', () => {
       body: [],
       start: 0,
       end: 0,
+      moduleId: 0,
       nonCodeMeta: { nonCodeNodes: {}, startNodes: [] },
     },
     'yz'
@@ -242,6 +242,7 @@ export function mutateObjExpProp(
       value: updateWith,
       start: 0,
       end: 0,
+      moduleId: 0,
     })
   }
 }
@@ -577,6 +578,7 @@ export function createLiteral(value: string | number): Node<Literal> {
     type: 'Literal',
     start: 0,
     end: 0,
+    moduleId: 0,
     value,
     raw: `${value}`,
   }
@@ -587,6 +589,7 @@ export function createTagDeclarator(value: string): Node<TagDeclarator> {
     type: 'TagDeclarator',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     value,
   }
@@ -597,6 +600,7 @@ export function createIdentifier(name: string): Node<Identifier> {
     type: 'Identifier',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     name,
   }
@@ -607,6 +611,7 @@ export function createPipeSubstitution(): Node<PipeSubstitution> {
     type: 'PipeSubstitution',
     start: 0,
     end: 0,
+    moduleId: 0,
   }
 }
 
@@ -618,10 +623,12 @@ export function createCallExpressionStdLib(
     type: 'CallExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
     callee: {
       type: 'Identifier',
       start: 0,
       end: 0,
+      moduleId: 0,
 
       name,
     },
@@ -638,10 +645,12 @@ export function createCallExpression(
     type: 'CallExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
     callee: {
       type: 'Identifier',
       start: 0,
       end: 0,
+      moduleId: 0,
 
       name,
     },
@@ -657,6 +666,7 @@ export function createArrayExpression(
     type: 'ArrayExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     nonCodeMeta: nonCodeMetaEmpty(),
     elements,
@@ -670,6 +680,7 @@ export function createPipeExpression(
     type: 'PipeExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     body,
     nonCodeMeta: nonCodeMetaEmpty(),
@@ -686,12 +697,14 @@ export function createVariableDeclaration(
     type: 'VariableDeclaration',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     declarations: [
       {
         type: 'VariableDeclarator',
         start: 0,
         end: 0,
+        moduleId: 0,
 
         id: createIdentifier(varName),
         init,
@@ -709,12 +722,14 @@ export function createObjectExpression(properties: {
     type: 'ObjectExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     nonCodeMeta: nonCodeMetaEmpty(),
     properties: Object.entries(properties).map(([key, value]) => ({
       type: 'ObjectProperty',
       start: 0,
       end: 0,
+      moduleId: 0,
       key: createIdentifier(key),
 
       value,
@@ -730,6 +745,7 @@ export function createUnaryExpression(
     type: 'UnaryExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     operator,
     argument,
@@ -745,6 +761,7 @@ export function createBinaryExpression([left, operator, right]: [
     type: 'BinaryExpression',
     start: 0,
     end: 0,
+    moduleId: 0,
 
     operator,
     left,
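All of the create* constructors above stamp the same start: 0, end: 0, moduleId: 0 triple onto synthetic nodes. A hedged sketch of the invariant (this helper does not exist in the PR; it only names the repeated pattern):

// Hypothetical helper: synthetic AST nodes have no real source location,
// so they carry zeroed offsets and the top-level module ID. Since 0 is the
// default, the serializer can omit moduleId for these nodes entirely.
function syntheticSourceInfo(): { start: number; end: number; moduleId: number } {
  return { start: 0, end: 0, moduleId: 0 }
}

// e.g. createIdentifier could then be expressed as:
//   return { type: 'Identifier', ...syntheticSourceInfo(), name }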
@@ -13,6 +13,7 @@ Map {
       "range": [
         37,
         64,
+        0,
       ],
     },
     "pathIds": [
@@ -31,6 +32,7 @@ Map {
       "range": [
         37,
         64,
+        0,
       ],
     },
     "planeId": "UUID",
@@ -56,6 +58,7 @@ Map {
       "range": [
         70,
         86,
+        0,
       ],
     },
     "edgeIds": [
@@ -77,6 +80,7 @@ Map {
       "range": [
         92,
         119,
+        0,
       ],
     },
     "edgeCutId": "UUID",
@@ -99,6 +103,7 @@ Map {
       "range": [
         125,
         150,
+        0,
       ],
     },
     "edgeIds": [
@@ -120,6 +125,7 @@ Map {
       "range": [
         156,
         203,
+        0,
       ],
     },
     "edgeIds": [
@@ -141,6 +147,7 @@ Map {
       "range": [
         209,
         217,
+        0,
       ],
     },
     "edgeIds": [],
@@ -162,6 +169,7 @@ Map {
       "range": [
         231,
         254,
+        0,
       ],
     },
     "edgeIds": [
@@ -289,6 +297,7 @@ Map {
       "range": [
         260,
         299,
+        0,
       ],
     },
     "consumedEdgeId": "UUID",
@@ -307,6 +316,7 @@ Map {
       "range": [
         350,
         377,
+        0,
       ],
     },
     "planeId": "UUID",
@@ -331,6 +341,7 @@ Map {
       "range": [
         383,
         398,
+        0,
       ],
     },
     "edgeIds": [
@@ -352,6 +363,7 @@ Map {
       "range": [
         404,
         420,
+        0,
       ],
     },
     "edgeIds": [
@@ -373,6 +385,7 @@ Map {
       "range": [
         426,
         473,
+        0,
       ],
     },
     "edgeIds": [
@@ -394,6 +407,7 @@ Map {
       "range": [
         479,
         487,
+        0,
       ],
     },
     "edgeIds": [],
@@ -415,6 +429,7 @@ Map {
       "range": [
         501,
         522,
+        0,
       ],
     },
     "edgeIds": [
@@ -610,7 +610,7 @@ describe('testing getArtifactsToUpdate', () => {
       sweepId: '',
       codeRef: {
         pathToNode: [['body', '']],
-        range: [37, 64],
+        range: [37, 64, 0],
       },
     },
   ])
@@ -622,7 +622,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: [],
       edgeIds: [],
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -632,7 +632,7 @@ describe('testing getArtifactsToUpdate', () => {
       planeId: expect.any(String),
       sweepId: expect.any(String),
       codeRef: {
-        range: [37, 64],
+        range: [37, 64, 0],
         pathToNode: [['body', '']],
       },
       solid2dId: expect.any(String),
@@ -645,7 +645,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceId: '',
       edgeIds: [],
       codeRef: {
-        range: [70, 86],
+        range: [70, 86, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -655,7 +655,7 @@ describe('testing getArtifactsToUpdate', () => {
       planeId: expect.any(String),
       sweepId: expect.any(String),
       codeRef: {
-        range: [37, 64],
+        range: [37, 64, 0],
         pathToNode: [['body', '']],
       },
       solid2dId: expect.any(String),
@@ -669,7 +669,7 @@ describe('testing getArtifactsToUpdate', () => {
       edgeIds: [],
       surfaceId: '',
       codeRef: {
-        range: [260, 299],
+        range: [260, 299, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -679,7 +679,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceId: expect.any(String),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [92, 119],
+        range: [92, 119, 0],
         pathToNode: [['body', '']],
       },
       edgeCutId: expect.any(String),
@@ -699,7 +699,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceId: expect.any(String),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [156, 203],
+        range: [156, 203, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -710,7 +710,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: expect.any(Array),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -727,7 +727,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceId: expect.any(String),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [125, 150],
+        range: [125, 150, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -738,7 +738,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: expect.any(Array),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -755,7 +755,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceId: expect.any(String),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [92, 119],
+        range: [92, 119, 0],
         pathToNode: [['body', '']],
       },
       edgeCutId: expect.any(String),
@@ -767,7 +767,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: expect.any(Array),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -784,7 +784,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceId: expect.any(String),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [70, 86],
+        range: [70, 86, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -795,7 +795,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: expect.any(Array),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -813,7 +813,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: expect.any(Array),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -831,7 +831,7 @@ describe('testing getArtifactsToUpdate', () => {
       surfaceIds: expect.any(Array),
       edgeIds: expect.any(Array),
       codeRef: {
-        range: [231, 254],
+        range: [231, 254, 0],
         pathToNode: [['body', '']],
       },
     },
@@ -1823,11 +1823,13 @@ export const updateStartProfileAtArgs: SketchLineHelper['updateArgs'] = ({
     modifiedAst: {
       start: 0,
       end: 0,
+      moduleId: 0,
       body: [],
 
       nonCodeMeta: {
         start: 0,
         end: 0,
+        moduleId: 0,
         startNodes: [],
         nonCodeNodes: [],
       },
@@ -120,8 +120,8 @@ const initialise = async () => {
 
 export const initPromise = initialise()
 
-export const rangeTypeFix = (ranges: number[][]): [number, number][] =>
-  ranges.map(([start, end]) => [start, end])
+export const rangeTypeFix = (ranges: number[][]): [number, number, number][] =>
+  ranges.map(([start, end, moduleId]) => [start, end, moduleId])
 
 export const parse = (code: string | Error): Node<Program> | Error => {
   if (err(code)) return code
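Per the commit message, a module_id of 0 is skipped when serializing AST nodes (and, per #4422, tokens too). The consuming side therefore has to treat a missing field as top-level; a sketch of that convention (the helper is illustrative, not code from this PR):

// A node deserialized from the Rust side may omit moduleId when it is 0,
// so absence must read as "top-level module".
function moduleIdOf(node: { moduleId?: number }): number {
  return node.moduleId ?? 0
}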
src/wasm-lib/Cargo.lock (generated, 2 changes)
@@ -3083,6 +3083,7 @@ dependencies = [
  "chrono",
  "dyn-clone",
  "indexmap 1.9.3",
+ "indexmap 2.6.0",
  "schemars_derive",
  "serde",
  "serde_json",
@@ -3883,6 +3884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3a2f31991cee3dce1ca4f929a8a04fdd11fd8801aac0f2030b0fa8a0a3fef6b9"
 dependencies = [
  "chrono",
+ "indexmap 2.6.0",
  "lazy_static",
  "serde_json",
  "thiserror 1.0.68",
@@ -173,9 +173,7 @@ fn do_stdlib_inner(
     quote! {
         let code_blocks = vec![#(#cb),*];
         code_blocks.iter().map(|cb| {
-            let tokens = crate::token::lexer(cb).unwrap();
-            let parser = crate::parser::Parser::new(tokens);
-            let program = parser.ast().unwrap();
+            let program = crate::parser::top_level_parse(cb).unwrap();
 
             let mut options: crate::ast::types::FormatOptions = Default::default();
             options.insert_final_newline = false;
@@ -750,9 +748,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
     quote! {
         #[tokio::test(flavor = "multi_thread")]
         async fn #test_name_mock() {
-            let tokens = crate::token::lexer(#code_block).unwrap();
-            let parser = crate::parser::Parser::new(tokens);
-            let program = parser.ast().unwrap();
+            let program = crate::parser::top_level_parse(#code_block).unwrap();
             let id_generator = crate::executor::IdGenerator::default();
             let ctx = crate::executor::ExecutorContext {
                 engine: std::sync::Arc::new(Box::new(crate::engine::conn_mock::EngineConnection::new().await.unwrap())),
@ -2,9 +2,7 @@
|
|||||||
mod test_examples_someFn {
|
mod test_examples_someFn {
|
||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn test_mock_example_someFn0() {
|
async fn test_mock_example_someFn0() {
|
||||||
let tokens = crate::token::lexer("someFn()").unwrap();
|
let program = crate::parser::top_level_parse("someFn()").unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let id_generator = crate::executor::IdGenerator::default();
|
let id_generator = crate::executor::IdGenerator::default();
|
||||||
let ctx = crate::executor::ExecutorContext {
|
let ctx = crate::executor::ExecutorContext {
|
||||||
engine: std::sync::Arc::new(Box::new(
|
engine: std::sync::Arc::new(Box::new(
|
||||||
@ -113,9 +111,7 @@ impl crate::docs::StdLibFn for SomeFn {
|
|||||||
code_blocks
|
code_blocks
|
||||||
.iter()
|
.iter()
|
||||||
.map(|cb| {
|
.map(|cb| {
|
||||||
let tokens = crate::token::lexer(cb).unwrap();
|
let program = crate::parser::top_level_parse(cb).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let mut options: crate::ast::types::FormatOptions = Default::default();
|
let mut options: crate::ast::types::FormatOptions = Default::default();
|
||||||
options.insert_final_newline = false;
|
options.insert_final_newline = false;
|
||||||
program.recast(&options, 0)
|
program.recast(&options, 0)
|
||||||
|
@ -2,9 +2,7 @@
|
|||||||
mod test_examples_someFn {
|
mod test_examples_someFn {
|
||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn test_mock_example_someFn0() {
|
async fn test_mock_example_someFn0() {
|
||||||
let tokens = crate::token::lexer("someFn()").unwrap();
|
let program = crate::parser::top_level_parse("someFn()").unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let id_generator = crate::executor::IdGenerator::default();
|
let id_generator = crate::executor::IdGenerator::default();
|
||||||
let ctx = crate::executor::ExecutorContext {
|
let ctx = crate::executor::ExecutorContext {
|
||||||
engine: std::sync::Arc::new(Box::new(
|
engine: std::sync::Arc::new(Box::new(
|
||||||
@ -113,9 +111,7 @@ impl crate::docs::StdLibFn for SomeFn {
|
|||||||
code_blocks
|
code_blocks
|
||||||
.iter()
|
.iter()
|
||||||
.map(|cb| {
|
.map(|cb| {
|
||||||
let tokens = crate::token::lexer(cb).unwrap();
|
let program = crate::parser::top_level_parse(cb).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let mut options: crate::ast::types::FormatOptions = Default::default();
|
let mut options: crate::ast::types::FormatOptions = Default::default();
|
||||||
options.insert_final_newline = false;
|
options.insert_final_newline = false;
|
||||||
program.recast(&options, 0)
|
program.recast(&options, 0)
|
||||||
|
@ -2,9 +2,9 @@
|
|||||||
mod test_examples_show {
|
mod test_examples_show {
|
||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn test_mock_example_show0() {
|
async fn test_mock_example_show0() {
|
||||||
let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nshow").unwrap();
|
let program =
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nshow")
|
||||||
let program = parser.ast().unwrap();
|
.unwrap();
|
||||||
let id_generator = crate::executor::IdGenerator::default();
|
let id_generator = crate::executor::IdGenerator::default();
|
||||||
let ctx = crate::executor::ExecutorContext {
|
let ctx = crate::executor::ExecutorContext {
|
||||||
engine: std::sync::Arc::new(Box::new(
|
engine: std::sync::Arc::new(Box::new(
|
||||||
@ -36,9 +36,8 @@ mod test_examples_show {
|
|||||||
|
|
||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn test_mock_example_show1() {
|
async fn test_mock_example_show1() {
|
||||||
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
|
let program =
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let id_generator = crate::executor::IdGenerator::default();
|
let id_generator = crate::executor::IdGenerator::default();
|
||||||
let ctx = crate::executor::ExecutorContext {
|
let ctx = crate::executor::ExecutorContext {
|
||||||
engine: std::sync::Arc::new(Box::new(
|
engine: std::sync::Arc::new(Box::new(
|
||||||
@ -150,9 +149,7 @@ impl crate::docs::StdLibFn for Show {
|
|||||||
code_blocks
|
code_blocks
|
||||||
.iter()
|
.iter()
|
||||||
.map(|cb| {
|
.map(|cb| {
|
||||||
let tokens = crate::token::lexer(cb).unwrap();
|
let program = crate::parser::top_level_parse(cb).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let mut options: crate::ast::types::FormatOptions = Default::default();
|
let mut options: crate::ast::types::FormatOptions = Default::default();
|
||||||
options.insert_final_newline = false;
|
options.insert_final_newline = false;
|
||||||
program.recast(&options, 0)
|
program.recast(&options, 0)
|
||||||
|
@ -2,9 +2,8 @@
|
|||||||
mod test_examples_show {
|
mod test_examples_show {
|
||||||
#[tokio::test(flavor = "multi_thread")]
|
#[tokio::test(flavor = "multi_thread")]
|
||||||
async fn test_mock_example_show0() {
|
async fn test_mock_example_show0() {
|
||||||
let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
|
let program =
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let id_generator = crate::executor::IdGenerator::default();
|
let id_generator = crate::executor::IdGenerator::default();
|
||||||
let ctx = crate::executor::ExecutorContext {
|
let ctx = crate::executor::ExecutorContext {
|
||||||
engine: std::sync::Arc::new(Box::new(
|
engine: std::sync::Arc::new(Box::new(
|
||||||
@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Show {
|
|||||||
code_blocks
|
code_blocks
|
||||||
.iter()
|
.iter()
|
||||||
.map(|cb| {
|
.map(|cb| {
|
||||||
let tokens = crate::token::lexer(cb).unwrap();
|
let program = crate::parser::top_level_parse(cb).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let mut options: crate::ast::types::FormatOptions = Default::default();
|
let mut options: crate::ast::types::FormatOptions = Default::default();
|
||||||
options.insert_final_newline = false;
|
options.insert_final_newline = false;
|
||||||
program.recast(&options, 0)
|
program.recast(&options, 0)
|
||||||
|
@@ -2,10 +2,9 @@
 mod test_examples_my_func {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_my_func0() {
-        let tokens =
-            crate::token::lexer("This is another code block.\nyes sirrr.\nmyFunc").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nmyFunc")
+                .unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -37,9 +36,8 @@ mod test_examples_my_func {
 
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_my_func1() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmyFunc").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nmyFunc").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -151,9 +149,7 @@ impl crate::docs::StdLibFn for MyFunc {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,10 +2,9 @@
 mod test_examples_line_to {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_line_to0() {
-        let tokens =
-            crate::token::lexer("This is another code block.\nyes sirrr.\nlineTo").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nlineTo")
+                .unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -37,9 +36,8 @@ mod test_examples_line_to {
 
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_line_to1() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nlineTo").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nlineTo").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -159,9 +157,7 @@ impl crate::docs::StdLibFn for LineTo {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,8 @@
 mod test_examples_min {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_min0() {
-        let tokens = crate::token::lexer("This is another code block.\nyes sirrr.\nmin").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is another code block.\nyes sirrr.\nmin").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -36,9 +35,8 @@ mod test_examples_min {
 
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_min1() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nmin").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nmin").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -150,9 +148,7 @@ impl crate::docs::StdLibFn for Min {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,8 @@
 mod test_examples_show {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_show0() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Show {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,8 @@
 mod test_examples_import {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_import0() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nimport").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Import {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,8 @@
 mod test_examples_import {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_import0() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nimport").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Import {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,8 @@
 mod test_examples_import {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_import0() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nimport").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nimport").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Import {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,8 @@
 mod test_examples_show {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_show0() {
-        let tokens = crate::token::lexer("This is code.\nIt does other shit.\nshow").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program =
+            crate::parser::top_level_parse("This is code.\nIt does other shit.\nshow").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -113,9 +112,7 @@ impl crate::docs::StdLibFn for Show {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -2,9 +2,7 @@
 mod test_examples_some_function {
     #[tokio::test(flavor = "multi_thread")]
     async fn test_mock_example_some_function0() {
-        let tokens = crate::token::lexer("someFunction()").unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse("someFunction()").unwrap();
         let id_generator = crate::executor::IdGenerator::default();
         let ctx = crate::executor::ExecutorContext {
             engine: std::sync::Arc::new(Box::new(
@@ -108,9 +106,7 @@ impl crate::docs::StdLibFn for SomeFunction {
             code_blocks
                 .iter()
                 .map(|cb| {
-                    let tokens = crate::token::lexer(cb).unwrap();
-                    let parser = crate::parser::Parser::new(tokens);
-                    let program = parser.ast().unwrap();
+                    let program = crate::parser::top_level_parse(cb).unwrap();
                     let mut options: crate::ast::types::FormatOptions = Default::default();
                     options.insert_final_newline = false;
                     program.recast(&options, 0)
@@ -16,8 +16,7 @@ use syn::{parse_macro_input, LitStr};
 pub fn parse(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as LitStr);
     let kcl_src = input.value();
-    let tokens = kcl_lib::token::lexer(&kcl_src).unwrap();
-    let ast = kcl_lib::parser::Parser::new(tokens).ast().unwrap();
+    let ast = kcl_lib::parser::top_level_parse(&kcl_src).unwrap();
     let ast_struct = ast.bake(&Default::default());
     quote!(#ast_struct).into()
 }
@@ -1,6 +1,6 @@
 extern crate alloc;
 use kcl_lib::ast::types::{
-    BodyItem, Expr, Identifier, ItemVisibility, Literal, LiteralValue, Node, Program, VariableDeclaration,
+    BodyItem, Expr, Identifier, ItemVisibility, Literal, LiteralValue, ModuleId, Node, Program, VariableDeclaration,
     VariableDeclarator, VariableKind,
 };
 use kcl_macros::parse;
@@ -9,6 +9,7 @@ use pretty_assertions::assert_eq;
 #[test]
 fn basic() {
     let actual = parse!("const y = 4");
+    let module_id = ModuleId::default();
     let expected = Node {
         inner: Program {
             body: vec![BodyItem::VariableDeclaration(Box::new(Node::new(
@@ -22,6 +23,7 @@ fn basic() {
                         },
                         6,
                         7,
+                        module_id,
                     ),
                     init: Expr::Literal(Box::new(Node::new(
                         Literal {
@@ -31,11 +33,13 @@ fn basic() {
                         },
                         10,
                         11,
+                        module_id,
                     ))),
                     digest: None,
                 },
                 6,
                 11,
+                module_id,
             )],
             visibility: ItemVisibility::Default,
             kind: VariableKind::Const,
@@ -43,12 +47,14 @@ fn basic() {
                 },
                 0,
                 11,
+                module_id,
             )))],
             non_code_meta: Default::default(),
             digest: None,
         },
         start: 0,
         end: 11,
+        module_id,
     };
     assert_eq!(expected, actual);
 }
@@ -15,7 +15,7 @@ use hyper::{
     service::{make_service_fn, service_fn},
     Body, Error, Response, Server,
 };
-use kcl_lib::{executor::ExecutorContext, settings::types::UnitLength, test_server::RequestBody};
+use kcl_lib::{ast::types::ModuleId, executor::ExecutorContext, settings::types::UnitLength, test_server::RequestBody};
 use tokio::{
     sync::{mpsc, oneshot},
     task::JoinHandle,
@@ -157,7 +157,8 @@ async fn snapshot_endpoint(body: Bytes, state: ExecutorContext) -> Response<Body
         Err(e) => return bad_request(format!("Invalid request JSON: {e}")),
     };
     let RequestBody { kcl_program, test_name } = body;
-    let parser = match kcl_lib::token::lexer(&kcl_program) {
+    let module_id = ModuleId::default();
+    let parser = match kcl_lib::token::lexer(&kcl_program, module_id) {
         Ok(ts) => kcl_lib::parser::Parser::new(ts),
         Err(e) => return bad_request(format!("tokenization error: {e}")),
     };
@@ -7,9 +7,7 @@ mod conn_mock_core;
 
 ///Converts the given kcl code to an engine test
 pub async fn kcl_to_engine_core(code: &str) -> Result<String> {
-    let tokens = kcl_lib::token::lexer(code)?;
-    let parser = kcl_lib::parser::Parser::new(tokens);
-    let program = parser.ast()?;
+    let program = kcl_lib::parser::top_level_parse(code)?;
 
     let result = Arc::new(Mutex::new("".into()));
     let ref_result = Arc::clone(&result);
@@ -37,14 +37,14 @@ parse-display = "0.9.1"
 pyo3 = { version = "0.22.6", optional = true }
 reqwest = { version = "0.12", default-features = false, features = ["stream", "rustls-tls"] }
 ropey = "1.6.1"
-schemars = { version = "0.8.17", features = ["impl_json_schema", "url", "uuid1", "preserve_order"] }
+schemars = { version = "0.8.17", features = ["impl_json_schema", "indexmap2", "url", "uuid1", "preserve_order"] }
 serde = { version = "1.0.214", features = ["derive"] }
 serde_json = "1.0.128"
 sha2 = "0.10.8"
 tabled = { version = "0.15.0", optional = true }
 thiserror = "2.0.0"
 toml = "0.8.19"
-ts-rs = { version = "10.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "no-serde-warnings", "serde-json-impl"] }
+ts-rs = { version = "10.0.0", features = ["uuid-impl", "url-impl", "chrono-impl", "indexmap-impl", "no-serde-warnings", "serde-json-impl"] }
 url = { version = "2.5.3", features = ["serde"] }
 urlencoding = "2.1.3"
 uuid = { version = "1.11.0", features = ["v4", "js", "serde"] }
@@ -1,9 +1,10 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 
 pub fn bench_lex(c: &mut Criterion) {
-    c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM)));
-    c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM)));
-    c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM)));
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    c.bench_function("lex_cube", |b| b.iter(|| lex(CUBE_PROGRAM, module_id)));
+    c.bench_function("lex_big_kitt", |b| b.iter(|| lex(KITT_PROGRAM, module_id)));
+    c.bench_function("lex_pipes_on_pipes", |b| b.iter(|| lex(PIPES_PROGRAM, module_id)));
 }
 
 pub fn bench_parse(c: &mut Criterion) {
@@ -15,7 +16,8 @@ pub fn bench_parse(c: &mut Criterion) {
         ("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
         ("koch snowflake", LSYSTEM_KOCH_SNOWFLAKE_PROGRAM),
     ] {
-        let tokens = kcl_lib::token::lexer(file).unwrap();
+        let module_id = kcl_lib::ast::types::ModuleId::default();
+        let tokens = kcl_lib::token::lexer(file, module_id).unwrap();
         c.bench_function(&format!("parse_{name}"), move |b| {
             let tok = tokens.clone();
             b.iter(move || {
@@ -26,8 +28,8 @@ pub fn bench_parse(c: &mut Criterion) {
     }
 }
 
-fn lex(program: &str) {
-    black_box(kcl_lib::token::lexer(program).unwrap());
+fn lex(program: &str, module_id: kcl_lib::ast::types::ModuleId) {
+    black_box(kcl_lib::token::lexer(program, module_id).unwrap());
 }
 
 criterion_group!(benches, bench_lex, bench_parse);
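The benchmark changes show the other half of the migration: `token::lexer` now takes the `ModuleId` that every emitted token's source range will carry. A small sketch of a direct call, assuming the `kcl_lib` paths used above:

    use kcl_lib::ast::types::ModuleId;

    // Tokens lexed here report module ID 0, i.e. the top-level file.
    let module_id = ModuleId::default();
    let tokens = kcl_lib::token::lexer("const x = 1", module_id).unwrap();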
@@ -1,26 +1,32 @@
 use iai::black_box;
 
 pub fn parse(program: &str) {
-    let tokens = kcl_lib::token::lexer(program).unwrap();
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    let tokens = kcl_lib::token::lexer(program, module_id).unwrap();
     let tok = tokens.clone();
     let parser = kcl_lib::parser::Parser::new(tok.clone());
     black_box(parser.ast().unwrap());
 }
 
 fn lex_kitt() {
-    black_box(kcl_lib::token::lexer(KITT_PROGRAM).unwrap());
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    black_box(kcl_lib::token::lexer(KITT_PROGRAM, module_id).unwrap());
 }
 fn lex_pipes() {
-    black_box(kcl_lib::token::lexer(PIPES_PROGRAM).unwrap());
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    black_box(kcl_lib::token::lexer(PIPES_PROGRAM, module_id).unwrap());
 }
 fn lex_cube() {
-    black_box(kcl_lib::token::lexer(CUBE_PROGRAM).unwrap());
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    black_box(kcl_lib::token::lexer(CUBE_PROGRAM, module_id).unwrap());
 }
 fn lex_math() {
-    black_box(kcl_lib::token::lexer(MATH_PROGRAM).unwrap());
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    black_box(kcl_lib::token::lexer(MATH_PROGRAM, module_id).unwrap());
 }
 fn lex_lsystem() {
-    black_box(kcl_lib::token::lexer(LSYSTEM_PROGRAM).unwrap());
+    let module_id = kcl_lib::ast::types::ModuleId::default();
+    black_box(kcl_lib::token::lexer(LSYSTEM_PROGRAM, module_id).unwrap());
 }
 
 fn parse_kitt() {
@@ -9,8 +9,7 @@ pub fn bench_digest(c: &mut Criterion) {
         ("mike_stress_test", MIKE_STRESS_TEST_PROGRAM),
         ("lsystem", LSYSTEM_PROGRAM),
     ] {
-        let tokens = kcl_lib::token::lexer(file).unwrap();
-        let prog = kcl_lib::parser::Parser::new(tokens).ast().unwrap();
+        let prog = kcl_lib::parser::top_level_parse(file).unwrap();
         c.bench_function(&format!("digest_{name}"), move |b| {
             let prog = prog.clone();
 
@@ -16,7 +16,7 @@ use crate::{
     executor::{Point2d, SourceRange},
 };
 
-use super::types::Node;
+use super::types::{ModuleId, Node};
 
 type Point3d = kcmc::shared::Point3d<f64>;
 
@@ -38,6 +38,7 @@ const EPSILON: f64 = 0.015625; // or 2^-6
 pub async fn modify_ast_for_sketch(
     engine: &Arc<Box<dyn EngineManager>>,
     program: &mut Node<Program>,
+    module_id: ModuleId,
     // The name of the sketch.
     sketch_name: &str,
     // The type of plane the sketch is on. `XY` or `XZ`, etc
@@ -183,9 +184,7 @@ pub async fn modify_ast_for_sketch(
     let recasted = program.recast(&FormatOptions::default(), 0);
 
     // Re-parse the ast so we get the correct source ranges.
-    let tokens = crate::token::lexer(&recasted)?;
-    let parser = crate::parser::Parser::new(tokens);
-    *program = parser.ast()?;
+    *program = crate::parser::parse(&recasted, module_id)?;
 
     Ok(recasted)
 }
@@ -37,6 +37,7 @@ pub(crate) mod digest;
 pub(crate) mod execute;
 mod literal_value;
 mod none;
+pub(crate) mod source_range;
 
 use digest::Digest;
 
@@ -48,11 +49,14 @@ pub enum Definition<'a> {
 #[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq, ts_rs::TS, Bake)]
 #[databake(path = kcl_lib::ast::types)]
 #[ts(export)]
+#[serde(rename_all = "camelCase")]
 pub struct Node<T> {
     #[serde(flatten)]
     pub inner: T,
     pub start: usize,
     pub end: usize,
+    #[serde(default, skip_serializing_if = "ModuleId::is_top_level")]
+    pub module_id: ModuleId,
 }
 
 impl<T: JsonSchema> schemars::JsonSchema for Node<T> {
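Because `module_id` is declared with `serde(default)` and skipped whenever `ModuleId::is_top_level` returns true, nodes in the top-level file serialize exactly as they did before this change; only nodes from imported modules gain the field. A sketch of the intended behavior, assuming `serde_json` and that `rename_all = "camelCase"` exposes the field as `moduleId` (`my_expr` is a hypothetical inner value):

    // Top-level node: module_id is 0, so the field is omitted entirely.
    let node = Node::new(my_expr.clone(), 6, 7, ModuleId::default());
    assert!(!serde_json::to_string(&node).unwrap().contains("moduleId"));

    // Node from an imported module: the field is serialized.
    let node = Node::new(my_expr, 6, 7, ModuleId::from_usize(1));
    assert!(serde_json::to_string(&node).unwrap().contains("\"moduleId\":1"));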
@@ -78,8 +82,13 @@ impl<T: JsonSchema> schemars::JsonSchema for Node<T> {
 }
 
 impl<T> Node<T> {
-    pub fn new(inner: T, start: usize, end: usize) -> Self {
-        Self { inner, start, end }
+    pub fn new(inner: T, start: usize, end: usize, module_id: ModuleId) -> Self {
+        Self {
+            inner,
+            start,
+            end,
+            module_id,
+        }
     }
 
     pub fn no_src(inner: T) -> Self {
@@ -87,15 +96,21 @@ impl<T> Node<T> {
             inner,
             start: 0,
             end: 0,
+            module_id: ModuleId::default(),
         }
     }
 
-    pub fn boxed(inner: T, start: usize, end: usize) -> BoxNode<T> {
-        Box::new(Node { inner, start, end })
+    pub fn boxed(inner: T, start: usize, end: usize, module_id: ModuleId) -> BoxNode<T> {
+        Box::new(Node {
+            inner,
+            start,
+            end,
+            module_id,
+        })
     }
 
     pub fn as_source_ranges(&self) -> Vec<SourceRange> {
-        vec![SourceRange([self.start, self.end])]
+        vec![SourceRange([self.start, self.end, self.module_id.as_usize()])]
     }
 }
 
@@ -121,19 +136,19 @@ impl<T: fmt::Display> fmt::Display for Node<T> {
 
 impl<T> From<Node<T>> for crate::executor::SourceRange {
     fn from(v: Node<T>) -> Self {
-        Self([v.start, v.end])
+        Self([v.start, v.end, v.module_id.as_usize()])
     }
 }
 
 impl<T> From<&Node<T>> for crate::executor::SourceRange {
     fn from(v: &Node<T>) -> Self {
-        Self([v.start, v.end])
+        Self([v.start, v.end, v.module_id.as_usize()])
     }
 }
 
 impl<T> From<&BoxNode<T>> for crate::executor::SourceRange {
     fn from(v: &BoxNode<T>) -> Self {
-        Self([v.start, v.end])
+        Self([v.start, v.end, v.module_id.as_usize()])
     }
 }
 
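These conversions are where the new third element enters every range: `[start, end]` becomes `[start, end, module_id]`, which is also why the whitelisted engine errors in the TS test helper now read `"sourceRanges":[[0,0,0]]`. A minimal sketch, assuming `SourceRange` remains a tuple struct over `[usize; 3]` (`node` is any `Node<T>` value):

    let range = crate::executor::SourceRange([node.start, node.end, node.module_id.as_usize()]);
    // The third element identifies the file the offsets point into;
    // 0 means the top-level module.
    assert_eq!(range.0[2], 0);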
@@ -505,6 +520,29 @@ impl Program {
     }
 }
 
+/// Identifier of a source file.  Uses a u32 to keep the size small.
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize, ts_rs::TS, JsonSchema, Bake)]
+#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
+#[databake(path = kcl_lib::ast::types)]
+#[ts(export)]
+pub struct ModuleId(pub u32);
+
+impl ModuleId {
+    pub fn from_usize(id: usize) -> Self {
+        Self(u32::try_from(id).expect("module ID should fit in a u32"))
+    }
+
+    pub fn as_usize(&self) -> usize {
+        usize::try_from(self.0).expect("module ID should fit in a usize")
+    }
+
+    /// Top-level file is the one being executed.
+    /// Represented by module ID of 0, i.e. the default value.
+    pub fn is_top_level(&self) -> bool {
+        *self == Self::default()
+    }
+}
+
 #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema, Bake)]
 #[databake(path = kcl_lib::ast::types)]
 #[ts(export)]
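`ModuleId` is deliberately small: a `Copy` newtype over `u32` whose default, 0, is reserved for the file being executed, while imported files get nonzero IDs. A usage sketch built only from the methods defined above:

    let top = ModuleId::default();
    assert!(top.is_top_level());
    assert_eq!(top.as_usize(), 0);

    // A nonzero ID, as an imported file would receive.
    let imported = ModuleId::from_usize(3);
    assert!(!imported.is_top_level());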
@@ -538,13 +576,13 @@ impl BodyItem {
 
 impl From<BodyItem> for SourceRange {
     fn from(item: BodyItem) -> Self {
-        Self([item.start(), item.end()])
+        Self([item.start(), item.end(), item.module_id().as_usize()])
     }
 }
 
 impl From<&BodyItem> for SourceRange {
     fn from(item: &BodyItem) -> Self {
-        Self([item.start(), item.end()])
+        Self([item.start(), item.end(), item.module_id().as_usize()])
     }
 }
 
@@ -568,7 +606,7 @@ pub enum Expr {
     MemberExpression(BoxNode<MemberExpression>),
     UnaryExpression(BoxNode<UnaryExpression>),
     IfExpression(BoxNode<IfExpression>),
-    None(KclNone),
+    None(Node<KclNone>),
 }
 
 impl Expr {
@@ -758,13 +796,13 @@ impl Expr {
 
 impl From<Expr> for SourceRange {
     fn from(value: Expr) -> Self {
-        Self([value.start(), value.end()])
+        Self([value.start(), value.end(), value.module_id().as_usize()])
     }
 }
 
 impl From<&Expr> for SourceRange {
     fn from(value: &Expr) -> Self {
-        Self([value.start(), value.end()])
+        Self([value.start(), value.end(), value.module_id().as_usize()])
     }
 }
 
@@ -784,13 +822,13 @@ pub enum BinaryPart {
 
 impl From<BinaryPart> for SourceRange {
     fn from(value: BinaryPart) -> Self {
-        Self([value.start(), value.end()])
+        Self([value.start(), value.end(), value.module_id().as_usize()])
     }
 }
 
 impl From<&BinaryPart> for SourceRange {
     fn from(value: &BinaryPart) -> Self {
-        Self([value.start(), value.end()])
+        Self([value.start(), value.end(), value.module_id().as_usize()])
     }
 }
 
@@ -2154,13 +2192,13 @@ impl MemberObject {
 
 impl From<MemberObject> for SourceRange {
     fn from(obj: MemberObject) -> Self {
-        Self([obj.start(), obj.end()])
+        Self([obj.start(), obj.end(), obj.module_id().as_usize()])
     }
 }
 
 impl From<&MemberObject> for SourceRange {
     fn from(obj: &MemberObject) -> Self {
-        Self([obj.start(), obj.end()])
+        Self([obj.start(), obj.end(), obj.module_id().as_usize()])
     }
 }
 
@@ -2191,13 +2229,13 @@ impl LiteralIdentifier {
 
 impl From<LiteralIdentifier> for SourceRange {
     fn from(id: LiteralIdentifier) -> Self {
-        Self([id.start(), id.end()])
+        Self([id.start(), id.end(), id.module_id().as_usize()])
     }
 }
 
 impl From<&LiteralIdentifier> for SourceRange {
     fn from(id: &LiteralIdentifier) -> Self {
-        Self([id.start(), id.end()])
+        Self([id.start(), id.end(), id.module_id().as_usize()])
     }
 }
 
@@ -3018,9 +3056,7 @@ fn ghi = (x) => {
 
 ghi("things")
 "#;
-        let tokens = crate::token::lexer(code).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(code).unwrap();
         let folding_ranges = program.get_lsp_folding_ranges();
         assert_eq!(folding_ranges.len(), 3);
         assert_eq!(folding_ranges[0].start_line, 29);
@@ -3056,9 +3092,7 @@ fn ghi = (x) => {
     return x
 }
 "#;
-        let tokens = crate::token::lexer(code).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(code).unwrap();
         let symbols = program.get_lsp_symbols(code).unwrap();
         assert_eq!(symbols.len(), 7);
     }
@@ -3078,9 +3112,7 @@ const cylinder = startSketchOn('-XZ')
     }, %)
   |> extrude(h, %)
 "#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(some_program_string).unwrap();
 
         let value = program.get_non_code_meta_for_position(50);
 
@@ -3103,9 +3135,7 @@ const cylinder = startSketchOn('-XZ')
     }, %)
   |> extrude(h, %)
 "#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(some_program_string).unwrap();
 
         let value = program.get_non_code_meta_for_position(124);
 
@@ -3118,9 +3148,7 @@ const cylinder = startSketchOn('-XZ')
   |> startProfileAt([0,0], %)
   |> xLine(5, %) // lin
 "#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(some_program_string).unwrap();
 
         let value = program.get_non_code_meta_for_position(86);
 
@@ -3132,9 +3160,7 @@ const cylinder = startSketchOn('-XZ')
         let some_program_string = r#"fn thing = (arg0: number, arg1: string, tag?: string) => {
     return arg0
 }"#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(some_program_string).unwrap();
 
         // Check the program output for the types of the parameters.
         let function = program.body.first().unwrap();
@@ -3156,9 +3182,7 @@ const cylinder = startSketchOn('-XZ')
         let some_program_string = r#"fn thing = (arg0: number[], arg1: string[], tag?: string) => {
     return arg0
 }"#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(some_program_string).unwrap();
 
         // Check the program output for the types of the parameters.
         let function = program.body.first().unwrap();
@@ -3180,9 +3204,8 @@ const cylinder = startSketchOn('-XZ')
         let some_program_string = r#"fn thing = (arg0: number[], arg1: {thing: number, things: string[], more?: string}, tag?: string) => {
     return arg0
 }"#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let module_id = ModuleId::default();
+        let program = crate::parser::parse(some_program_string, module_id).unwrap();
 
         // Check the program output for the types of the parameters.
         let function = program.body.first().unwrap();
@@ -3207,6 +3230,7 @@ const cylinder = startSketchOn('-XZ')
                         },
                         35,
                         40,
+                        module_id,
                     ),
                     type_: Some(FnArgType::Primitive(FnArgPrimitive::Number)),
                     optional: false,
@@ -3220,6 +3244,7 @@ const cylinder = startSketchOn('-XZ')
                         },
                         50,
                         56,
+                        module_id,
                     ),
                     type_: Some(FnArgType::Array(FnArgPrimitive::String)),
                     optional: false,
@@ -3233,6 +3258,7 @@ const cylinder = startSketchOn('-XZ')
                         },
                         68,
                         72,
+                        module_id,
                     ),
                     type_: Some(FnArgType::Primitive(FnArgPrimitive::String)),
                     optional: true,
@@ -3249,9 +3275,8 @@ const cylinder = startSketchOn('-XZ')
         let some_program_string = r#"fn thing = () => {thing: number, things: string[], more?: string} {
     return 1
 }"#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let module_id = ModuleId::default();
+        let program = crate::parser::parse(some_program_string, module_id).unwrap();
 
         // Check the program output for the types of the parameters.
         let function = program.body.first().unwrap();
@@ -3275,6 +3300,7 @@ const cylinder = startSketchOn('-XZ')
                         },
                         18,
                         23,
+                        module_id,
                     ),
                     type_: Some(FnArgType::Primitive(FnArgPrimitive::Number)),
                     optional: false,
@@ -3288,6 +3314,7 @@ const cylinder = startSketchOn('-XZ')
                         },
                         33,
                         39,
+                        module_id,
                     ),
                     type_: Some(FnArgType::Array(FnArgPrimitive::String)),
                     optional: false,
@@ -3301,6 +3328,7 @@ const cylinder = startSketchOn('-XZ')
                         },
                         51,
                         55,
+                        module_id,
                     ),
                     type_: Some(FnArgType::Primitive(FnArgPrimitive::String)),
                     optional: true,
@@ -3349,6 +3377,7 @@ const cylinder = startSketchOn('-XZ')
                 },
                 start: 0,
                 end: 0,
+                module_id: ModuleId::default(),
             },
             return_type: None,
             digest: None,
@@ -3375,6 +3404,7 @@ const cylinder = startSketchOn('-XZ')
                 },
                 start: 0,
                 end: 0,
+                module_id: ModuleId::default(),
             },
             return_type: None,
             digest: None,
@@ -3412,6 +3442,7 @@ const cylinder = startSketchOn('-XZ')
                 },
                 start: 0,
                 end: 0,
+                module_id: ModuleId::default(),
             },
             return_type: None,
             digest: None,
@@ -3429,9 +3460,7 @@ const cylinder = startSketchOn('-XZ')
     #[tokio::test(flavor = "multi_thread")]
     async fn test_parse_object_bool() {
        let some_program_string = r#"some_func({thing: true, other_thing: false})"#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast().unwrap();
+        let program = crate::parser::top_level_parse(some_program_string).unwrap();
 
         // We want to get the bool and verify it is a bool.
 
@@ -3479,14 +3508,12 @@ const cylinder = startSketchOn('-XZ')
   |> startProfileAt([0, 0], %)
   |> line([5, 5], %, $xLine)
 "#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let result = parser.ast();
+        let result = crate::parser::top_level_parse(some_program_string);
 
         assert!(result.is_err());
         assert_eq!(
             result.unwrap_err().to_string(),
-            r#"syntax: KclErrorDetails { source_ranges: [SourceRange([76, 82])], message: "Cannot assign a tag to a reserved keyword: xLine" }"#
+            r#"syntax: KclErrorDetails { source_ranges: [SourceRange([76, 82, 0])], message: "Cannot assign a tag to a reserved keyword: xLine" }"#
         );
     }
 
@@ -3496,14 +3523,12 @@ const cylinder = startSketchOn('-XZ')
   |> startProfileAt([0, 0], %)
   |> line([5, 5], %, $)
 "#;
-        let tokens = crate::token::lexer(some_program_string).unwrap();
-        let parser = crate::parser::Parser::new(tokens);
-        let result = parser.ast();
+        let result = crate::parser::top_level_parse(some_program_string);
 
         assert!(result.is_err());
         assert_eq!(
             result.unwrap_err().to_string(),
-            r#"syntax: KclErrorDetails { source_ranges: [SourceRange([57, 59])], message: "Unexpected token: |>" }"#
+            r#"syntax: KclErrorDetails { source_ranges: [SourceRange([57, 59, 0])], message: "Unexpected token: |>" }"#
         );
     }
 
@@ -3513,17 +3538,13 @@ const cylinder = startSketchOn('-XZ')
   |> startProfileAt([0, 0], %)
   |> line([5, 5], %)
 "#;
-        let prog1_tokens = crate::token::lexer(prog1_string).unwrap();
-        let prog1_parser = crate::parser::Parser::new(prog1_tokens);
-        let prog1_digest = prog1_parser.ast().unwrap().compute_digest();
+        let prog1_digest = crate::parser::top_level_parse(prog1_string).unwrap().compute_digest();
 
         let prog2_string = r#"startSketchOn('XY')
   |> startProfileAt([0, 2], %)
   |> line([5, 5], %)
 "#;
-        let prog2_tokens = crate::token::lexer(prog2_string).unwrap();
-        let prog2_parser = crate::parser::Parser::new(prog2_tokens);
-        let prog2_digest = prog2_parser.ast().unwrap().compute_digest();
+        let prog2_digest = crate::parser::top_level_parse(prog2_string).unwrap().compute_digest();
 
         assert!(prog1_digest != prog2_digest);
 
@@ -3531,9 +3552,7 @@ const cylinder = startSketchOn('-XZ')
   |> startProfileAt([0, 0], %)
   |> line([5, 5], %)
 "#;
-        let prog3_tokens = crate::token::lexer(prog3_string).unwrap();
-        let prog3_parser = crate::parser::Parser::new(prog3_tokens);
-        let prog3_digest = prog3_parser.ast().unwrap().compute_digest();
+        let prog3_digest = crate::parser::top_level_parse(prog3_string).unwrap().compute_digest();
 
         assert_eq!(prog1_digest, prog3_digest);
     }
@@ -50,7 +50,7 @@ impl Node<IfExpression> {
 impl Node<ElseIf> {
     #[allow(dead_code)]
     fn source_ranges(&self) -> Vec<SourceRange> {
-        vec![SourceRange([self.start, self.end])]
+        vec![SourceRange([self.start, self.end, self.module_id.as_usize()])]
     }
 }
 
@@ -235,7 +235,7 @@ pub(crate) async fn execute_pipe_body(
     // they use the % from the parent. After all, this pipe expression hasn't been executed yet, so it doesn't have any % value
     // of its own.
     let meta = Metadata {
-        source_range: SourceRange([first.start(), first.end()]),
+        source_range: SourceRange::from(first),
     };
     let output = ctx
         .execute_expr(first, exec_state, &meta, StatementKind::Expression)
@@ -285,7 +285,7 @@ async fn inner_execute_pipe_body(
         | Expr::None(_) => {}
     };
     let metadata = Metadata {
-        source_range: SourceRange([expression.start(), expression.end()]),
+        source_range: SourceRange::from(expression),
     };
     let output = ctx
         .execute_expr(expression, exec_state, &metadata, StatementKind::Expression)
@@ -6,9 +6,11 @@ use serde::{Deserialize, Serialize};
 
 use crate::{
     ast::types::ConstraintLevel,
-    executor::{KclValue, SourceRange, UserVal},
+    executor::{KclValue, UserVal},
 };
 
+use super::Node;
+
 const KCL_NONE_ID: &str = "KCL_NONE_ID";
 
 /// KCL value for an optional parameter which was not given an argument.
@@ -19,9 +21,6 @@ const KCL_NONE_ID: &str = "KCL_NONE_ID";
 #[ts(export)]
 #[serde(tag = "type")]
 pub struct KclNone {
-    // TODO: Convert this to be an Option<SourceRange>.
-    pub start: usize,
-    pub end: usize,
     #[serde(deserialize_with = "deser_private")]
     #[ts(skip)]
     #[schemars(skip)]
@@ -29,12 +28,8 @@ pub struct KclNone {
 }
 
 impl KclNone {
-    pub fn new(start: usize, end: usize) -> Self {
-        Self {
-            start,
-            end,
-            __private: Private {},
-        }
+    pub fn new() -> Self {
+        Self { __private: Private {} }
     }
 }
 
@@ -63,12 +58,6 @@ where
     }
 }
 
-impl From<&KclNone> for SourceRange {
-    fn from(v: &KclNone) -> Self {
-        Self([v.start, v.end])
-    }
-}
-
 impl From<&KclNone> for UserVal {
     fn from(none: &KclNone) -> Self {
         UserVal {
@@ -85,16 +74,18 @@ impl From<&KclNone> for KclValue {
     }
 }
 
-impl KclNone {
-    pub fn source_range(&self) -> SourceRange {
-        SourceRange([self.start, self.end])
+impl From<&Node<KclNone>> for KclValue {
+    fn from(none: &Node<KclNone>) -> Self {
+        Self::from(&none.inner)
     }
+}
 
+impl Node<KclNone> {
     /// Get the constraint level.
     /// KCL None is never constrained.
     pub fn get_constraint_level(&self) -> ConstraintLevel {
         ConstraintLevel::None {
            source_ranges: self.as_source_ranges(),
-            source_ranges: vec![self.source_range()],
        }
     }
 }
|
66
src/wasm-lib/kcl/src/ast/types/source_range.rs
Normal file
66
src/wasm-lib/kcl/src/ast/types/source_range.rs
Normal file
@@ -0,0 +1,66 @@
+use super::{BinaryPart, BodyItem, Expr, LiteralIdentifier, MemberObject, ModuleId};
+
+impl BodyItem {
+    pub fn module_id(&self) -> ModuleId {
+        match self {
+            BodyItem::ImportStatement(stmt) => stmt.module_id,
+            BodyItem::ExpressionStatement(expression_statement) => expression_statement.module_id,
+            BodyItem::VariableDeclaration(variable_declaration) => variable_declaration.module_id,
+            BodyItem::ReturnStatement(return_statement) => return_statement.module_id,
+        }
+    }
+}
+
+impl Expr {
+    pub fn module_id(&self) -> ModuleId {
+        match self {
+            Expr::Literal(literal) => literal.module_id,
+            Expr::Identifier(identifier) => identifier.module_id,
+            Expr::TagDeclarator(tag) => tag.module_id,
+            Expr::BinaryExpression(binary_expression) => binary_expression.module_id,
+            Expr::FunctionExpression(function_expression) => function_expression.module_id,
+            Expr::CallExpression(call_expression) => call_expression.module_id,
+            Expr::PipeExpression(pipe_expression) => pipe_expression.module_id,
+            Expr::PipeSubstitution(pipe_substitution) => pipe_substitution.module_id,
+            Expr::ArrayExpression(array_expression) => array_expression.module_id,
+            Expr::ArrayRangeExpression(array_range) => array_range.module_id,
+            Expr::ObjectExpression(object_expression) => object_expression.module_id,
+            Expr::MemberExpression(member_expression) => member_expression.module_id,
+            Expr::UnaryExpression(unary_expression) => unary_expression.module_id,
+            Expr::IfExpression(expr) => expr.module_id,
+            Expr::None(none) => none.module_id,
+        }
+    }
+}
+
+impl BinaryPart {
+    pub fn module_id(&self) -> ModuleId {
+        match self {
+            BinaryPart::Literal(literal) => literal.module_id,
+            BinaryPart::Identifier(identifier) => identifier.module_id,
+            BinaryPart::BinaryExpression(binary_expression) => binary_expression.module_id,
+            BinaryPart::CallExpression(call_expression) => call_expression.module_id,
+            BinaryPart::UnaryExpression(unary_expression) => unary_expression.module_id,
+            BinaryPart::MemberExpression(member_expression) => member_expression.module_id,
+            BinaryPart::IfExpression(e) => e.module_id,
+        }
+    }
+}
+
+impl MemberObject {
+    pub fn module_id(&self) -> ModuleId {
+        match self {
+            MemberObject::MemberExpression(member_expression) => member_expression.module_id,
+            MemberObject::Identifier(identifier) => identifier.module_id,
+        }
+    }
+}
+
+impl LiteralIdentifier {
+    pub fn module_id(&self) -> ModuleId {
+        match self {
+            LiteralIdentifier::Identifier(identifier) => identifier.module_id,
+            LiteralIdentifier::Literal(literal) => literal.module_id,
+        }
+    }
+}
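The accessors in this new file give every AST node kind a uniform way to report which module it came from; combined with a node's start/end offsets, that is enough to build a module-aware range anywhere in the crate. A minimal sketch of the pattern, using `BinaryPart`, whose `start()`/`end()` accessors also appear in the binary-expression hunk further down:

    // Sketch: lift any binary-expression operand into a module-aware range.
    // SourceRange's third element is the module ID, as defined in executor.rs.
    fn range_of(part: &BinaryPart) -> SourceRange {
        SourceRange([part.start(), part.end(), part.module_id().as_usize()])
    }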
@@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
 use thiserror::Error;
 use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
 
-use crate::{executor::SourceRange, lsp::IntoDiagnostic};
+use crate::{ast::types::ModuleId, executor::SourceRange, lsp::IntoDiagnostic};
 
 #[derive(Error, Debug, Serialize, Deserialize, ts_rs::TS, Clone, PartialEq, Eq)]
 #[ts(export)]
@@ -147,6 +147,13 @@ impl IntoDiagnostic for KclError {
         let message = self.get_message();
         let source_ranges = self.source_ranges();
 
+        // Limit to only errors in the top-level file.
+        let module_id = ModuleId::default();
+        let source_ranges = source_ranges
+            .iter()
+            .filter(|r| r.module_id() == module_id)
+            .collect::<Vec<_>>();
+
         Diagnostic {
             range: source_ranges.first().map(|r| r.to_lsp_range(code)).unwrap_or_default(),
             severity: Some(self.severity()),
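The filtering here matters because LSP diagnostics are resolved against the document the editor currently has open: an offset pair from an imported module would land on unrelated text in the top-level file. A minimal sketch of the rule the hunk applies ("top level" means `ModuleId::default()`, i.e. ID 0):

    // Sketch: keep only ranges that point into the top-level file.
    fn top_level_ranges(ranges: &[SourceRange]) -> Vec<&SourceRange> {
        ranges
            .iter()
            .filter(|r| r.module_id() == ModuleId::default())
            .collect()
    }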
@@ -7,6 +7,7 @@ use std::{
 
 use anyhow::Result;
 use async_recursion::async_recursion;
+use indexmap::IndexMap;
 use kcmc::{
     each_cmd as mcmd,
     ok_response::{output::TakeSnapshot, OkModelingCmdResponse},
@@ -26,8 +27,8 @@ type Point3D = kcmc::shared::Point3d<f64>;
 
 use crate::{
     ast::types::{
-        human_friendly_type, BodyItem, Expr, FunctionExpression, ItemVisibility, KclNone, Node, NodeRef, Program,
-        TagDeclarator, TagNode,
+        human_friendly_type, BodyItem, Expr, FunctionExpression, ItemVisibility, KclNone, ModuleId, Node, NodeRef,
+        Program, TagDeclarator, TagNode,
     },
     engine::{EngineManager, ExecutionKind},
     errors::{KclError, KclErrorDetails},
@@ -55,11 +56,32 @@ pub struct ExecState {
    /// The stack of import statements for detecting circular module imports.
    /// If this is empty, we're not currently executing an import statement.
    pub import_stack: Vec<std::path::PathBuf>,
+    /// Map from source file absolute path to module ID.
+    pub path_to_source_id: IndexMap<std::path::PathBuf, ModuleId>,
+    /// Map from module ID to module info.
+    pub module_infos: IndexMap<ModuleId, ModuleInfo>,
     /// The directory of the current project. This is used for resolving import
     /// paths. If None is given, the current working directory is used.
     pub project_directory: Option<String>,
 }
 
+impl ExecState {
+    pub fn add_module(&mut self, path: std::path::PathBuf) -> ModuleId {
+        // Need to avoid borrowing self in the closure.
+        let new_module_id = ModuleId::from_usize(self.path_to_source_id.len());
+        let mut is_new = false;
+        let id = *self.path_to_source_id.entry(path.clone()).or_insert_with(|| {
+            is_new = true;
+            new_module_id
+        });
+        if is_new {
+            let module_info = ModuleInfo { id, path };
+            self.module_infos.insert(id, module_info);
+        }
+        id
+    }
+}
+
 #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, ts_rs::TS, JsonSchema)]
 #[ts(export)]
 #[serde(rename_all = "camelCase")]
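`add_module` interns paths: the first time a path is seen it gets the next sequential ID and a `ModuleInfo` entry; later calls with the same path return the existing ID. A usage sketch (assuming `ExecState::default()`, which the executor relies on elsewhere via `..Default::default()`):

    // Sketch: module IDs are reused per path, so repeated imports of the
    // same file do not allocate new IDs.
    let mut exec_state = ExecState::default();
    let first = exec_state.add_module(std::path::PathBuf::from("a.kcl"));
    let second = exec_state.add_module(std::path::PathBuf::from("a.kcl"));
    assert_eq!(first, second);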
@@ -1373,21 +1395,33 @@ pub enum BodyType {
     Block,
 }
 
+/// Info about a module. Right now, this is pretty minimal. We hope to cache
+/// modules here in the future.
+#[derive(Debug, Default, Clone, PartialEq, Eq, Deserialize, Serialize, ts_rs::TS, JsonSchema)]
+#[cfg_attr(feature = "pyo3", pyo3::pyclass)]
+#[ts(export)]
+pub struct ModuleInfo {
+    /// The ID of the module.
+    id: ModuleId,
+    /// Absolute path of the module's source file.
+    path: std::path::PathBuf,
+}
+
 #[derive(Debug, Default, Deserialize, Serialize, PartialEq, Copy, Clone, ts_rs::TS, JsonSchema, Hash, Eq)]
 #[cfg_attr(feature = "pyo3", pyo3::pyclass)]
 #[ts(export)]
-pub struct SourceRange(#[ts(type = "[number, number]")] pub [usize; 2]);
+pub struct SourceRange(#[ts(type = "[number, number]")] pub [usize; 3]);
 
-impl From<[usize; 2]> for SourceRange {
-    fn from(value: [usize; 2]) -> Self {
+impl From<[usize; 3]> for SourceRange {
+    fn from(value: [usize; 3]) -> Self {
         Self(value)
     }
 }
 
 impl SourceRange {
     /// Create a new source range.
-    pub fn new(start: usize, end: usize) -> Self {
-        Self([start, end])
+    pub fn new(start: usize, end: usize, module_id: ModuleId) -> Self {
+        Self([start, end, module_id.as_usize()])
     }
 
     /// Get the start of the range.
@@ -1400,6 +1434,11 @@ impl SourceRange {
         self.0[1]
     }
 
+    /// Get the module ID of the range.
+    pub fn module_id(&self) -> ModuleId {
+        ModuleId::from_usize(self.0[2])
+    }
+
     /// Check if the range contains a position.
     pub fn contains(&self, pos: usize) -> bool {
         pos >= self.start() && pos <= self.end()
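With these two hunks a `SourceRange` is a `[start, end, module_id]` triple, so a range now identifies both a span and the file it belongs to. A minimal sketch of the round trip:

    // Sketch: the third element survives construction and comes back out
    // as a typed ModuleId.
    let module_id = ModuleId::default();
    let range = SourceRange::new(10, 20, module_id);
    assert_eq!(range.start(), 10);
    assert_eq!(range.end(), 20);
    assert_eq!(range.module_id(), module_id);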
@@ -1533,7 +1572,7 @@ impl From<SourceRange> for Metadata {
 impl<T> From<NodeRef<'_, T>> for Metadata {
     fn from(node: NodeRef<'_, T>) -> Self {
         Self {
-            source_range: SourceRange::new(node.start, node.end),
+            source_range: SourceRange::new(node.start, node.end, node.module_id),
         }
     }
 }
@@ -2171,6 +2210,8 @@ impl ExecutorContext {
             project_directory,
             ..Default::default()
         };
+        // TODO: Use the top-level file's path.
+        exec_state.add_module(std::path::PathBuf::from(""));
         // Before we even start executing the program, set the units.
         self.engine
             .batch_modeling_cmd(
@@ -2210,6 +2251,13 @@ impl ExecutorContext {
                 BodyItem::ImportStatement(import_stmt) => {
                     let source_range = SourceRange::from(import_stmt);
                     let path = import_stmt.path.clone();
+                    // Empty path is used by the top-level module.
+                    if path.is_empty() {
+                        return Err(KclError::Semantic(KclErrorDetails {
+                            message: "import path cannot be empty".to_owned(),
+                            source_ranges: vec![source_range],
+                        }));
+                    }
                     let resolved_path = if let Some(project_dir) = &exec_state.project_directory {
                         std::path::PathBuf::from(project_dir).join(&path)
                     } else {
@@ -2230,8 +2278,9 @@ impl ExecutorContext {
                             source_ranges: vec![import_stmt.into()],
                         }));
                     }
+                    let module_id = exec_state.add_module(resolved_path.clone());
                     let source = self.fs.read_to_string(&resolved_path, source_range).await?;
-                    let program = crate::parser::parse(&source)?;
+                    let program = crate::parser::parse(&source, module_id)?;
                     let (module_memory, module_exports) = {
                         exec_state.import_stack.push(resolved_path.clone());
                         let original_execution = self.engine.replace_execution_kind(ExecutionKind::Isolated);
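The ordering in this hunk is the heart of the fix: the imported file is registered first, and the ID that comes back is threaded into the parser, so every token and AST node produced from that module records which file it belongs to. A condensed sketch of the flow (`std::fs::read_to_string` stands in for the `self.fs` call above):

    // Sketch: interning the path before parsing is what stamps the
    // imported module's nodes with the right ID.
    let module_id = exec_state.add_module(resolved_path.clone());
    let source = std::fs::read_to_string(&resolved_path)?; // stand-in for self.fs
    let program = crate::parser::parse(&source, module_id)?;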
@@ -2359,7 +2408,7 @@ impl ExecutorContext {
                     // True here tells the engine to flush all the end commands as well like fillets
                     // and chamfers where the engine would otherwise eat the ID of the segments.
                     true,
-                    SourceRange([program.end, program.end]),
+                    SourceRange([program.end, program.end, program.module_id.as_usize()]),
                 )
                 .await?;
         }
@@ -2525,7 +2574,12 @@ fn assign_args_to_params(
             if param.optional {
                 // If the corresponding parameter is optional,
                 // then it's fine, the user doesn't need to supply it.
-                let none = KclNone::new(param.identifier.start, param.identifier.end);
+                let none = Node {
+                    inner: KclNone::new(),
+                    start: param.identifier.start,
+                    end: param.identifier.end,
+                    module_id: param.identifier.module_id,
+                };
                 fn_memory.add(
                     &param.identifier.name,
                     KclValue::from(&none),
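The struct literal above also documents the shape of the `Node<T>` wrapper this PR threads module IDs through: since a Rust struct literal must name every field, `Node<T>` at this commit has exactly these four. A sketch inferred from that construction:

    // Sketch (inferred from the literal above): Node<T> bundles a value
    // with its span and the module that span belongs to.
    struct Node<T> {
        inner: T,
        start: usize,
        end: usize,
        module_id: ModuleId,
    }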
@@ -2586,9 +2640,8 @@ mod tests {
     use crate::ast::types::{Identifier, Node, Parameter};
 
     pub async fn parse_execute(code: &str) -> Result<ProgramMemory> {
-        let tokens = crate::token::lexer(code)?;
-        let parser = crate::parser::Parser::new(tokens);
-        let program = parser.ast()?;
+        let program = crate::parser::top_level_parse(code)?;
         let ctx = ExecutorContext {
             engine: Arc::new(Box::new(crate::engine::conn_mock::EngineConnection::new().await?)),
             fs: Arc::new(crate::fs::FileManager::new()),
@@ -3027,7 +3080,7 @@ const answer = returnX()"#;
             err,
             KclError::UndefinedValue(KclErrorDetails {
                 message: "memory item key `x` is not defined".to_owned(),
-                source_ranges: vec![SourceRange([64, 65]), SourceRange([97, 106])],
+                source_ranges: vec![SourceRange([64, 65, 0]), SourceRange([97, 106, 0])],
             }),
         );
     }
@@ -3062,7 +3115,7 @@ let shape = layer() |> patternTransform(10, transform, %)
             err,
             KclError::UndefinedValue(KclErrorDetails {
                 message: "memory item key `x` is not defined".to_owned(),
-                source_ranges: vec![SourceRange([80, 81])],
+                source_ranges: vec![SourceRange([80, 81, 0])],
             }),
         );
     }
@@ -3317,7 +3370,7 @@ let notNull = !myNull
             parse_execute(code1).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Semantic(KclErrorDetails {
                 message: "Cannot apply unary operator ! to non-boolean value: null".to_owned(),
-                source_ranges: vec![SourceRange([56, 63])],
+                source_ranges: vec![SourceRange([56, 63, 0])],
             })
         );
 
@@ -3326,7 +3379,7 @@ let notNull = !myNull
             parse_execute(code2).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Semantic(KclErrorDetails {
                 message: "Cannot apply unary operator ! to non-boolean value: 0".to_owned(),
-                source_ranges: vec![SourceRange([14, 16])],
+                source_ranges: vec![SourceRange([14, 16, 0])],
             })
         );
 
@@ -3337,7 +3390,7 @@ let notEmptyString = !""
             parse_execute(code3).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Semantic(KclErrorDetails {
                 message: "Cannot apply unary operator ! to non-boolean value: \"\"".to_owned(),
-                source_ranges: vec![SourceRange([22, 25])],
+                source_ranges: vec![SourceRange([22, 25, 0])],
             })
        );
 
@@ -3349,7 +3402,7 @@ let notMember = !obj.a
             parse_execute(code4).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Semantic(KclErrorDetails {
                 message: "Cannot apply unary operator ! to non-boolean value: 1".to_owned(),
-                source_ranges: vec![SourceRange([36, 42])],
+                source_ranges: vec![SourceRange([36, 42, 0])],
             })
         );
 
@@ -3360,7 +3413,7 @@ let notArray = !a";
             parse_execute(code5).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Semantic(KclErrorDetails {
                 message: "Cannot apply unary operator ! to non-boolean value: []".to_owned(),
-                source_ranges: vec![SourceRange([27, 29])],
+                source_ranges: vec![SourceRange([27, 29, 0])],
             })
         );
 
@@ -3371,7 +3424,7 @@ let notObject = !x";
             parse_execute(code6).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Semantic(KclErrorDetails {
                 message: "Cannot apply unary operator ! to non-boolean value: {}".to_owned(),
-                source_ranges: vec![SourceRange([28, 30])],
+                source_ranges: vec![SourceRange([28, 30, 0])],
             })
         );
 
@@ -3424,7 +3477,7 @@ let notTagIdentifier = !myTag";
             parse_execute(code10).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Syntax(KclErrorDetails {
                 message: "Unexpected token: !".to_owned(),
-                source_ranges: vec![SourceRange([14, 15])],
+                source_ranges: vec![SourceRange([14, 15, 0])],
             })
         );
 
@@ -3437,7 +3490,7 @@ let notPipeSub = 1 |> identity(!%))";
             parse_execute(code11).await.unwrap_err().downcast::<KclError>().unwrap(),
             KclError::Syntax(KclErrorDetails {
                 message: "Unexpected token: |>".to_owned(),
-                source_ranges: vec![SourceRange([54, 56])],
+                source_ranges: vec![SourceRange([54, 56, 0])],
             })
         );
 
@@ -3483,7 +3536,7 @@ test([0, 0])
         assert!(result.is_err());
         assert_eq!(
             result.unwrap_err().to_string(),
-            r#"undefined value: KclErrorDetails { source_ranges: [SourceRange([10, 34])], message: "Result of user-defined function test is undefined" }"#.to_owned()
+            r#"undefined value: KclErrorDetails { source_ranges: [SourceRange([10, 34, 0])], message: "Result of user-defined function test is undefined" }"#.to_owned()
         );
     }
 
@@ -3600,7 +3653,7 @@ let w = f() + f()
                 vec![req_param("x")],
                 vec![],
                 Err(KclError::Semantic(KclErrorDetails {
-                    source_ranges: vec![SourceRange([0, 0])],
+                    source_ranges: vec![SourceRange([0, 0, 0])],
                     message: "Expected 1 arguments, got 0".to_owned(),
                 })),
             ),
@@ -3618,7 +3671,7 @@ let w = f() + f()
                 vec![req_param("x"), opt_param("y")],
                 vec![],
                 Err(KclError::Semantic(KclErrorDetails {
-                    source_ranges: vec![SourceRange([0, 0])],
+                    source_ranges: vec![SourceRange([0, 0, 0])],
                     message: "Expected 1-2 arguments, got 0".to_owned(),
                 })),
             ),
@@ -3645,7 +3698,7 @@ let w = f() + f()
                 vec![req_param("x"), opt_param("y")],
                 vec![mem(1), mem(2), mem(3)],
                 Err(KclError::Semantic(KclErrorDetails {
-                    source_ranges: vec![SourceRange([0, 0])],
+                    source_ranges: vec![SourceRange([0, 0, 0])],
                     message: "Expected 1-2 arguments, got 3".to_owned(),
                 })),
             ),
@@ -3661,6 +3714,7 @@ let w = f() + f()
                 },
                 start: 0,
                 end: 0,
+                module_id: ModuleId::default(),
             },
             return_type: None,
             digest: None,
@@ -29,7 +29,10 @@ fn lint_lower_camel_case_var(decl: &VariableDeclarator) -> Result<Vec<Discovered
     let name = &ident.name;
 
     if !name.is_case(convert_case::Case::Camel) {
-        findings.push(Z0001.at(format!("found '{}'", name), SourceRange::new(ident.start, ident.end)));
+        findings.push(Z0001.at(
+            format!("found '{}'", name),
+            SourceRange::new(ident.start, ident.end, ident.module_id),
+        ));
         return Ok(findings);
     }
 
@@ -42,7 +45,10 @@ fn lint_lower_camel_case_property(decl: &ObjectProperty) -> Result<Vec<Discovere
     let name = &ident.name;
 
     if !name.is_case(convert_case::Case::Camel) {
-        findings.push(Z0001.at(format!("found '{}'", name), SourceRange::new(ident.start, ident.end)));
+        findings.push(Z0001.at(
+            format!("found '{}'", name),
+            SourceRange::new(ident.start, ident.end, ident.module_id),
+        ));
         return Ok(findings);
     }
 
@@ -144,7 +144,7 @@ pub fn lint_should_be_offset_plane(node: Node) -> Result<Vec<Discovered>> {
         return Ok(vec![]);
     };
 
-    let call_source_range = SourceRange::new(call.start, call.end);
+    let call_source_range = SourceRange::new(call.start, call.end, call.module_id);
     Ok(vec![Z0003.at(
         format!(
             "custom plane in startSketchOn; offsetPlane from {} would work here",
@@ -28,7 +28,7 @@ fn lint_too_many_args_std_lib_function(
         if exp.arguments.len() != 2 {
             findings.push(Z0002.at(
                 format!("expected 2 arguments, found {}", exp.arguments.len()),
-                SourceRange::new(exp.start, exp.end),
+                SourceRange::new(exp.start, exp.end, exp.module_id),
             ));
         }
         return Ok(findings);
@@ -38,7 +38,7 @@ fn lint_too_many_args_std_lib_function(
         if exp.arguments.len() < 2 {
             findings.push(Z0002.at(
                 format!("expected at least 2 arguments, found {}", exp.arguments.len()),
-                SourceRange::new(exp.start, exp.end),
+                SourceRange::new(exp.start, exp.end, exp.module_id),
             ));
         }
         return Ok(findings);
@@ -48,7 +48,7 @@ fn lint_too_many_args_std_lib_function(
         if exp.arguments.len() > fn_args_len {
             findings.push(Z0002.at(
                 format!("expected {} arguments, found {}", fn_args_len, exp.arguments.len()),
-                SourceRange::new(exp.start, exp.end),
+                SourceRange::new(exp.start, exp.end, exp.module_id),
             ));
         }
 
@@ -182,9 +182,7 @@ mod test {
 
     macro_rules! assert_no_finding {
         ( $check:expr, $finding:expr, $kcl:expr ) => {
-            let tokens = $crate::token::lexer($kcl).unwrap();
-            let parser = $crate::parser::Parser::new(tokens);
-            let prog = parser.ast().unwrap();
+            let prog = $crate::parser::top_level_parse($kcl).unwrap();
             for discovered_finding in prog.lint($check).unwrap() {
                 if discovered_finding.finding == $finding {
                     assert!(false, "Finding {:?} was emitted", $finding.code);
@@ -195,9 +193,7 @@ mod test {
 
     macro_rules! assert_finding {
         ( $check:expr, $finding:expr, $kcl:expr ) => {
-            let tokens = $crate::token::lexer($kcl).unwrap();
-            let parser = $crate::parser::Parser::new(tokens);
-            let prog = parser.ast().unwrap();
+            let prog = $crate::parser::top_level_parse($kcl).unwrap();
 
             for discovered_finding in prog.lint($check).unwrap() {
                 if discovered_finding.finding == $finding {
@@ -40,7 +40,7 @@ use tower_lsp::{
 };
 
 use crate::{
-    ast::types::{Expr, Node, NodeRef, VariableKind},
+    ast::types::{Expr, ModuleId, Node, NodeRef, VariableKind},
     executor::{IdGenerator, SourceRange},
     lsp::{backend::Backend as _, util::IntoDiagnostic},
     parser::PIPE_OPERATOR,
@@ -188,7 +188,8 @@ impl crate::lsp::backend::Backend for Backend {
         // We already updated the code map in the shared backend.
 
         // Lets update the tokens.
-        let tokens = match crate::token::lexer(&params.text) {
+        let module_id = ModuleId::default();
+        let tokens = match crate::token::lexer(&params.text, module_id) {
             Ok(tokens) => tokens,
             Err(err) => {
                 self.add_to_diagnostics(&params, &[err], true).await;
@@ -1235,7 +1236,8 @@ impl LanguageServer for Backend {
         // Parse the ast.
         // I don't know if we need to do this again since it should be updated in the context.
         // But I figure better safe than sorry since this will write back out to the file.
-        let Ok(tokens) = crate::token::lexer(current_code) else {
+        let module_id = ModuleId::default();
+        let Ok(tokens) = crate::token::lexer(current_code, module_id) else {
             return Ok(None);
         };
         let parser = crate::parser::Parser::new(tokens);
@@ -1251,7 +1253,7 @@ impl LanguageServer for Backend {
             },
             0,
         );
-        let source_range = SourceRange([0, current_code.len()]);
+        let source_range = SourceRange::new(0, current_code.len(), module_id);
         let range = source_range.to_lsp_range(current_code);
         Ok(Some(vec![TextEdit {
             new_text: recast,
@@ -1272,7 +1274,8 @@ impl LanguageServer for Backend {
         // Parse the ast.
         // I don't know if we need to do this again since it should be updated in the context.
         // But I figure better safe than sorry since this will write back out to the file.
-        let Ok(tokens) = crate::token::lexer(current_code) else {
+        let module_id = ModuleId::default();
+        let Ok(tokens) = crate::token::lexer(current_code, module_id) else {
             return Ok(None);
         };
         let parser = crate::parser::Parser::new(tokens);
@@ -1286,7 +1289,7 @@ impl LanguageServer for Backend {
         ast.rename_symbol(&params.new_name, pos);
         // Now recast it.
         let recast = ast.recast(&Default::default(), 0);
-        let source_range = SourceRange([0, current_code.len() - 1]);
+        let source_range = SourceRange::new(0, current_code.len() - 1, module_id);
         let range = source_range.to_lsp_range(current_code);
         Ok(Some(WorkspaceEdit {
             changes: Some(HashMap::from([(
@@ -1,5 +1,5 @@
 use crate::{
-    ast::types::{Node, Program},
+    ast::types::{ModuleId, Node, Program},
     errors::{KclError, KclErrorDetails},
     executor::SourceRange,
     token::{Token, TokenType},
@@ -12,9 +12,15 @@ pub(crate) mod parser_impl;
 pub const PIPE_SUBSTITUTION_OPERATOR: &str = "%";
 pub const PIPE_OPERATOR: &str = "|>";
 
+/// Parse the given KCL code into an AST. This is the top-level.
+pub fn top_level_parse(code: &str) -> Result<Node<Program>, KclError> {
+    let module_id = ModuleId::default();
+    parse(code, module_id)
+}
+
 /// Parse the given KCL code into an AST.
-pub fn parse(code: &str) -> Result<Node<Program>, KclError> {
-    let tokens = crate::token::lexer(code)?;
+pub fn parse(code: &str, module_id: ModuleId) -> Result<Node<Program>, KclError> {
+    let tokens = crate::token::lexer(code, module_id)?;
     let parser = Parser::new(tokens);
     parser.ast()
 }
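`top_level_parse` pins the main file to `ModuleId::default()` (ID 0), while imported modules go through `parse` with the ID handed out by `ExecState::add_module`. A usage sketch (`main_source`, `import_source`, and `module_id` are placeholders):

    // Sketch: the two parser entry points after this change.
    let main = crate::parser::top_level_parse(main_source)?;        // module ID 0
    let imported = crate::parser::parse(import_source, module_id)?; // interned ID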
@@ -5,7 +5,8 @@ mod tests {
         ($func_name:ident, $test_kcl_program:expr) => {
             #[test]
             fn $func_name() {
-                if let Ok(v) = $crate::token::lexer($test_kcl_program) {
+                let module_id = $crate::parser::ModuleId::default();
+                if let Ok(v) = $crate::token::lexer($test_kcl_program, module_id) {
                     let _ = $crate::parser::Parser::new(v).ast();
                 }
             }
@@ -30,6 +30,7 @@ fn evaluate(rpn: Vec<BinaryExpressionToken>) -> Result<Node<BinaryExpression>, K
                 };
                 let start = left.start();
                 let end = right.end();
+                let module_id = left.module_id();
 
                 BinaryPart::BinaryExpression(Node::boxed(
                     BinaryExpression {
@@ -40,6 +41,7 @@ fn evaluate(rpn: Vec<BinaryExpressionToken>) -> Result<Node<BinaryExpression>, K
                     },
                     start,
                     end,
+                    module_id,
                 ))
             }
             BinaryExpressionToken::Operand(o) => o,
@@ -60,11 +62,11 @@ fn source_range(tokens: &[BinaryExpressionToken]) -> Vec<SourceRange> {
         .iter()
         .filter_map(|op| match op {
             BinaryExpressionToken::Operator(_) => None,
-            BinaryExpressionToken::Operand(o) => Some((o.start(), o.end())),
+            BinaryExpressionToken::Operand(o) => Some((o.start(), o.end(), o.module_id())),
         })
         .collect();
     match (sources.first(), sources.last()) {
-        (Some((start, _)), Some((_, end))) => vec![SourceRange([*start, *end])],
+        (Some((start, _, module_id)), Some((_, end, _))) => vec![SourceRange([*start, *end, module_id.as_usize()])],
         _ => Vec::new(),
     }
 }
@@ -124,7 +126,7 @@ impl From<BinaryOperator> for BinaryExpressionToken {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::ast::types::Literal;
+    use crate::ast::types::{Literal, ModuleId};
 
     #[test]
     fn parse_and_evaluate() {
@@ -138,6 +140,7 @@ mod tests {
             },
             0,
             0,
+            ModuleId::default(),
         )))
     }
     let tests: Vec<Vec<BinaryExpressionToken>> = vec![
@@ -158,6 +161,7 @@ mod tests {
                 },
                 0,
                 0,
+                ModuleId::default(),
             ))
             .into(),
             BinaryOperator::Pow.into(),
File diff suppressed because it is too large
@@ -1,13 +1,12 @@
 use winnow::{
     error::{ErrorKind, ParseError, StrContext},
     stream::Stream,
-    Located,
 };
 
 use crate::{
     errors::{KclError, KclErrorDetails},
     executor::SourceRange,
-    token::Token,
+    token::{Input, Token},
 };
 
 /// Accumulate context while backtracking errors
@@ -20,9 +19,10 @@ pub struct ContextError<C = StrContext> {
     pub cause: Option<KclError>,
 }
 
-impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
-    fn from(err: ParseError<Located<&str>, winnow::error::ContextError>) -> Self {
+impl From<ParseError<Input<'_>, winnow::error::ContextError>> for KclError {
+    fn from(err: ParseError<Input<'_>, winnow::error::ContextError>) -> Self {
         let (input, offset): (Vec<char>, usize) = (err.input().chars().collect(), err.offset());
+        let module_id = err.input().state.module_id;
 
         if offset >= input.len() {
             // From the winnow docs:
@@ -31,7 +31,7 @@ impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
             // the end of input (input.len()) on eof errors.
 
             return KclError::Lexical(KclErrorDetails {
-                source_ranges: vec![SourceRange([offset, offset])],
+                source_ranges: vec![SourceRange([offset, offset, module_id.as_usize()])],
                 message: "unexpected EOF while parsing".to_string(),
             });
         }
@@ -42,7 +42,7 @@ impl From<ParseError<Located<&str>, winnow::error::ContextError>> for KclError {
         // TODO: Add the Winnow parser context to the error.
         // See https://github.com/KittyCAD/modeling-app/issues/784
         KclError::Lexical(KclErrorDetails {
-            source_ranges: vec![SourceRange([offset, offset + 1])],
+            source_ranges: vec![SourceRange([offset, offset + 1, module_id.as_usize()])],
             message: format!("found unknown token '{}'", bad_token),
         })
     }
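`err.input().state.module_id` works because the tokeniser's `Input` type carries parser state alongside the located source. The tokeniser diff itself is suppressed above, so this is an assumption about how such a type would be defined; only the `.state.module_id` access is visible here:

    // Sketch: a stateful winnow input whose state records the module being
    // lexed, so every parse error can name its file.
    pub type Input<'a> = winnow::Stateful<winnow::Located<&'a str>, State>;

    pub struct State {
        pub module_id: ModuleId,
    }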
@@ -1,5 +1,5 @@
 use crate::{
-    ast::types::{Node, Program},
+    ast::types::{ModuleId, Node, Program},
     errors::KclError,
     parser::Parser,
     token::Token,
@@ -44,7 +44,7 @@ fn read(filename: &'static str, test_name: &str) -> String {
 
 fn tokenize(test_name: &str) {
     let input = read("input.kcl", test_name);
-    let token_res = crate::token::lexer(&input);
+    let token_res = crate::token::lexer(&input, ModuleId::default());
 
     assert_snapshot(test_name, "Result of tokenizing", || {
         insta::assert_json_snapshot!("tokens", token_res);
@@ -6,7 +6,6 @@ use serde::{Deserialize, Serialize};
 use crate::{
     ast::types::{BodyItem, Expr, FunctionExpression, Node, Program},
     docs::{StdLibFn, StdLibFnData},
-    token::lexer,
 };
 
 pub trait KclStdLibFn: StdLibFn {
@@ -83,8 +82,7 @@ impl Serialize for Box<dyn KclStdLibFn> {
 /// Return the program and its single function.
 /// Return None if those expectations aren't met.
 pub fn extract_function(source: &str) -> Option<(Node<Program>, crate::ast::types::BoxNode<FunctionExpression>)> {
-    let tokens = lexer(source).unwrap();
-    let src = crate::parser::Parser::new(tokens).ast().ok()?;
+    let src = crate::parser::top_level_parse(source).ok()?;
     assert_eq!(src.body.len(), 1);
     let BodyItem::ExpressionStatement(expr) = src.body.last()? else {
         panic!("expected expression statement");
@@ -17,9 +17,7 @@ pub struct RequestBody {
 /// This returns the bytes of the snapshot.
 pub async fn execute_and_snapshot(code: &str, units: UnitLength) -> anyhow::Result<image::DynamicImage> {
     let ctx = new_context(units, true).await?;
-    let tokens = crate::token::lexer(code)?;
-    let parser = crate::parser::Parser::new(tokens);
-    let program = parser.ast()?;
+    let program = crate::parser::top_level_parse(code)?;
     do_execute_and_snapshot(&ctx, program).await.map(|(_state, snap)| snap)
 }
 
@@ -37,9 +35,7 @@ pub async fn execute_and_snapshot_ast(
 
 pub async fn execute_and_snapshot_no_auth(code: &str, units: UnitLength) -> anyhow::Result<image::DynamicImage> {
     let ctx = new_context(units, false).await?;
-    let tokens = crate::token::lexer(code)?;
-    let parser = crate::parser::Parser::new(tokens);
-    let program = parser.ast()?;
+    let program = crate::parser::top_level_parse(code)?;
     do_execute_and_snapshot(&ctx, program).await.map(|(_state, snap)| snap)
 }
 
@@ -8,13 +8,16 @@ use tower_lsp::lsp_types::SemanticTokenType;
 use winnow::stream::ContainsToken;
 
 use crate::{
-    ast::types::{ItemVisibility, VariableKind},
+    ast::types::{ItemVisibility, ModuleId, VariableKind},
     errors::KclError,
     executor::SourceRange,
 };
 
 mod tokeniser;
 
+// Re-export
+pub use tokeniser::Input;
+
 /// The types of tokens.
 #[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, ts_rs::TS, JsonSchema, FromStr, Display)]
 #[cfg_attr(feature = "pyo3", pyo3::pyclass(eq, eq_int))]
@@ -161,6 +164,8 @@ pub struct Token {
     pub start: usize,
     /// Offset in the source code where this token ends.
     pub end: usize,
+    #[serde(default, skip_serializing_if = "ModuleId::is_top_level")]
+    pub module_id: ModuleId,
     pub value: String,
 }
 
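Serialization is where backward compatibility is preserved: `skip_serializing_if = "ModuleId::is_top_level"` keeps the field out of tokens from the main file, and `default` fills it back in when deserializing older data. A sketch of the observable behavior (assuming `ModuleId::is_top_level` is true exactly for the default ID 0, and that `TokenType::Word` is a valid variant):

    // Sketch: a top-level token serializes without the field.
    let token = Token::from_range(0..1, ModuleId::default(), TokenType::Word, "x".to_owned());
    let json = serde_json::to_string(&token).unwrap();
    assert!(!json.contains("module_id"));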
@@ -177,10 +182,16 @@ impl ContainsToken<Token> for TokenType {
 }
 
 impl Token {
-    pub fn from_range(range: std::ops::Range<usize>, token_type: TokenType, value: String) -> Self {
+    pub fn from_range(
+        range: std::ops::Range<usize>,
+        module_id: ModuleId,
+        token_type: TokenType,
+        value: String,
+    ) -> Self {
         Self {
             start: range.start,
             end: range.end,
+            module_id,
             value,
             token_type,
         }
@@ -193,7 +204,7 @@ impl Token {
     }
 
     pub fn as_source_range(&self) -> SourceRange {
-        SourceRange([self.start, self.end])
+        SourceRange([self.start, self.end, self.module_id.as_usize()])
     }
 
     pub fn as_source_ranges(&self) -> Vec<SourceRange> {
@@ -227,18 +238,18 @@ impl Token {
 
 impl From<Token> for SourceRange {
     fn from(token: Token) -> Self {
-        Self([token.start, token.end])
+        Self([token.start, token.end, token.module_id.as_usize()])
     }
 }
 
 impl From<&Token> for SourceRange {
     fn from(token: &Token) -> Self {
-        Self([token.start, token.end])
+        Self([token.start, token.end, token.module_id.as_usize()])
     }
 }
 
-pub fn lexer(s: &str) -> Result<Vec<Token>, KclError> {
-    tokeniser::lexer(s).map_err(From::from)
+pub fn lexer(s: &str, module_id: ModuleId) -> Result<Vec<Token>, KclError> {
+    tokeniser::lexer(s, module_id).map_err(From::from)
 }
 
 #[cfg(test)]
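The lexer's new signature makes the module explicit at every call site; nothing infers it. A usage sketch:

    // Sketch: top-level callers pass the default ID, imports pass their own.
    let tokens = crate::token::lexer("x = 1", ModuleId::default())?;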
File diff suppressed because it is too large
@@ -573,7 +573,7 @@ impl FunctionExpression {
 mod tests {
     use pretty_assertions::assert_eq;
 
-    use crate::ast::types::FormatOptions;
+    use crate::ast::types::{FormatOptions, ModuleId};
 
     #[test]
     fn test_recast_if_else_if_same() {
@ -585,9 +585,7 @@ mod tests {
|
|||||||
5
|
5
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(input).unwrap();
|
let program = crate::parser::top_level_parse(input).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let output = program.recast(&Default::default(), 0);
|
let output = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(output, input);
|
assert_eq!(output, input);
|
||||||
}
|
}
|
||||||
@ -600,9 +598,7 @@ mod tests {
|
|||||||
5
|
5
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(input).unwrap();
|
let program = crate::parser::top_level_parse(input).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let output = program.recast(&Default::default(), 0);
|
let output = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(output, input);
|
assert_eq!(output, input);
|
||||||
}
|
}
|
||||||
@ -616,9 +612,7 @@ import a as aaa, b from "a.kcl"
|
|||||||
import a, b as bbb from "a.kcl"
|
import a, b as bbb from "a.kcl"
|
||||||
import a as aaa, b as bbb from "a.kcl"
|
import a as aaa, b as bbb from "a.kcl"
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(input).unwrap();
|
let program = crate::parser::top_level_parse(input).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let output = program.recast(&Default::default(), 0);
|
let output = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(output, input);
|
assert_eq!(output, input);
|
||||||
}
|
}
|
||||||
@ -627,7 +621,7 @@ import a as aaa, b as bbb from "a.kcl"
|
|||||||
fn test_recast_import_as_same_name() {
|
fn test_recast_import_as_same_name() {
|
||||||
let input = r#"import a as a from "a.kcl"
|
let input = r#"import a as a from "a.kcl"
|
||||||
"#;
|
"#;
|
||||||
let program = crate::parser::parse(input).unwrap();
|
let program = crate::parser::top_level_parse(input).unwrap();
|
||||||
let output = program.recast(&Default::default(), 0);
|
let output = program.recast(&Default::default(), 0);
|
||||||
let expected = r#"import a from "a.kcl"
|
let expected = r#"import a from "a.kcl"
|
||||||
"#;
|
"#;
|
||||||
@ -640,9 +634,7 @@ import a as aaa, b as bbb from "a.kcl"
|
|||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(input).unwrap();
|
let program = crate::parser::top_level_parse(input).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
let output = program.recast(&Default::default(), 0);
|
let output = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(output, input);
|
assert_eq!(output, input);
|
||||||
}
|
}
|
||||||
@ -765,9 +757,7 @@ fn zoo = (x0, y0) => {
|
|||||||
|
|
||||||
zoo(zoo_x, zoo_y)
|
zoo(zoo_x, zoo_y)
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string);
|
assert_eq!(recasted, some_program_string);
|
||||||
@ -836,9 +826,7 @@ outsideRevolve = startSketchOn('XZ')
|
|||||||
|> line([overHangLength - thickness, 0], %)
|
|> line([overHangLength - thickness, 0], %)
|
||||||
|> close(%)
|
|> close(%)
|
||||||
|> revolve({ axis: 'y' }, %)"#;
|
|> revolve({ axis: 'y' }, %)"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -914,9 +902,7 @@ outsideRevolve = startSketchOn('XZ')
|
|||||||
let some_program_string = r#"bing = { yo: 55 }
|
let some_program_string = r#"bing = { yo: 55 }
|
||||||
myNestedVar = [{ prop: callExp(bing.yo) }]
|
myNestedVar = [{ prop: callExp(bing.yo) }]
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string);
|
assert_eq!(recasted, some_program_string);
|
||||||
@ -927,9 +913,7 @@ myNestedVar = [{ prop: callExp(bing.yo) }]
|
|||||||
let some_program_string = r#"bing = { yo: 55 }
|
let some_program_string = r#"bing = { yo: 55 }
|
||||||
myNestedVar = [callExp(bing.yo)]
|
myNestedVar = [callExp(bing.yo)]
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string);
|
assert_eq!(recasted, some_program_string);
|
||||||
@ -941,9 +925,7 @@ myNestedVar = [callExp(bing.yo)]
|
|||||||
ten = 10
|
ten = 10
|
||||||
bar = [0 + 1 .. ten]
|
bar = [0 + 1 .. ten]
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(recasted, some_program_string);
|
assert_eq!(recasted, some_program_string);
|
||||||
@ -957,9 +939,7 @@ bar = [0 + 1 .. ten]
|
|||||||
|
|
||||||
thing ( 1 )
|
thing ( 1 )
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -982,9 +962,7 @@ myNestedVar = [
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@ -1000,9 +978,7 @@ myNestedVar = [
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_recast_empty_file() {
|
fn test_recast_empty_file() {
|
||||||
let some_program_string = r#""#;
|
let some_program_string = r#""#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
// Its VERY important this comes back with zero new lines.
|
// Its VERY important this comes back with zero new lines.
|
||||||
@ -1013,9 +989,7 @@ myNestedVar = [
|
|||||||
fn test_recast_empty_file_new_line() {
|
fn test_recast_empty_file_new_line() {
|
||||||
let some_program_string = r#"
|
let some_program_string = r#"
|
||||||
"#;
|
"#;
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let program = crate::parser::top_level_parse(some_program_string).unwrap();
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let program = parser.ast().unwrap();
|
|
||||||
|
|
||||||
let recasted = program.recast(&Default::default(), 0);
|
let recasted = program.recast(&Default::default(), 0);
|
||||||
// Its VERY important this comes back with zero new lines.
|
// Its VERY important this comes back with zero new lines.
|
||||||
@ -1026,14 +1000,12 @@ myNestedVar = [
|
|||||||
fn test_recast_shebang_only() {
|
fn test_recast_shebang_only() {
|
||||||
let some_program_string = r#"#!/usr/local/env zoo kcl"#;
|
let some_program_string = r#"#!/usr/local/env zoo kcl"#;
|
||||||
|
|
||||||
let tokens = crate::token::lexer(some_program_string).unwrap();
|
let result = crate::parser::top_level_parse(some_program_string);
|
||||||
let parser = crate::parser::Parser::new(tokens);
|
|
||||||
let result = parser.ast();
|
|
||||||
|
|
||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.unwrap_err().to_string(),
|
result.unwrap_err().to_string(),
|
||||||
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([21, 24])], message: "Unexpected end of file. The compiler expected a function body items (functions are made up of variable declarations, expressions, and return statements, each of those is a possible body item" }"#
|
r#"syntax: KclErrorDetails { source_ranges: [SourceRange([21, 24, 0])], message: "Unexpected end of file. The compiler expected a function body items (functions are made up of variable declarations, expressions, and return statements, each of those is a possible body item" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1048,9 +1020,7 @@ part001 = startSketchOn('XY')
 |> close(%)
 "#;

-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1081,9 +1051,7 @@ part001 = startSketchOn('XY')
 |> close(%)
 "#;

-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1113,9 +1081,7 @@ part001 = startSketchOn('XY')
 |> close(%)
 "#;

-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1253,9 +1219,7 @@ tabs_l = startSketchOn({
 distance: length - 10
 }, %)
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 // Its VERY important this comes back with zero new lines.
@@ -1393,9 +1357,7 @@ tabs_l = startSketchOn({
 |> close(%)
 |> extrude(scale, %)
 }"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1419,9 +1381,7 @@ tabs_l = startSketchOn({
 |> startProfileAt([0.0, 5.0], %)
 |> line([0.4900857016, -0.0240763666], %)
 |> line([0.6804562304, 0.9087880491], %)"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1440,9 +1400,7 @@ tabs_l = startSketchOn({
 |> startProfileAt([0.0, 5.0], %)
 |> line([0.4900857016, -0.0240763666], %) // hello world
 |> line([0.6804562304, 0.9087880491], %)"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1461,9 +1419,7 @@ tabs_l = startSketchOn({
 |> line([0.4900857016, -0.0240763666], %)
 // hello world
 |> line([0.6804562304, 0.9087880491], %)"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1488,9 +1444,7 @@ tabs_l = startSketchOn({
 // this is also a comment
 return things
 }"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1514,9 +1468,7 @@ tabs_l = startSketchOn({
 // this is also a comment
 thing = 'foo'
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1537,9 +1489,7 @@ key = 'c'
 // hello
 thing = 'foo'
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1567,9 +1517,7 @@ thing = 'c'

 foo = 'bar' //
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1595,9 +1543,7 @@ foo = 'bar' //
 // hello
 thing = 'foo'
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1616,9 +1562,7 @@ thing = 'foo'
 /* comment at start */

 mySk1 = startSketchAt([0, 0])"#;
-let tokens = crate::token::lexer(test_program).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(test_program).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1648,9 +1592,7 @@ mySk1 = startSketchOn('XY')
 |> ry(45, %)
 |> rx(45, %)
 // one more for good measure"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1686,9 +1628,7 @@ mySk1 = startSketchOn('XY')
 intersectTag: seg01
 }, %)
 |> line([-0.42, -1.72], %)"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string);
@@ -1712,9 +1652,7 @@ yo = [
 " hey oooooo really long long long"
 ]
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted, some_program_string);
@@ -1730,9 +1668,7 @@ key = 'c'
 things = "things"

 // this is also a comment"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 let expected = some_program_string.trim();
@@ -1751,9 +1687,7 @@ things = "things"
 // a comment
 "
 }"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string.trim());
@@ -1777,9 +1711,7 @@ part001 = startSketchOn('XY')
 -angleToMatchLengthY(seg01, myVar, %),
 myVar
 ], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string);
@@ -1804,9 +1736,7 @@ part001 = startSketchOn('XY')
 myVar
 ], %) // ln-lineTo-yAbsolute should use angleToMatchLengthY helper
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(
 &FormatOptions {
@@ -1835,9 +1765,7 @@ fn ghi = (part001) => {
 return part001
 }
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let mut program = parser.ast().unwrap();
+let mut program = crate::parser::top_level_parse(some_program_string).unwrap();
 program.rename_symbol("mySuperCoolPart", 6);

 let recasted = program.recast(&Default::default(), 0);
@@ -1865,9 +1793,7 @@ fn ghi = (part001) => {
 let some_program_string = r#"fn ghi = (x, y, z) => {
 return x
 }"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let mut program = parser.ast().unwrap();
+let mut program = crate::parser::top_level_parse(some_program_string).unwrap();
 program.rename_symbol("newName", 10);

 let recasted = program.recast(&Default::default(), 0);
@@ -1889,9 +1815,7 @@ fn ghi = (part001) => {
 angle_start: 0,
 angle_end: 180,
 }, %)"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1921,9 +1845,7 @@ firstExtrude = startSketchOn('XY')
 |> close(%)
 |> extrude(h, %)
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1960,9 +1882,7 @@ firstExtrude = startSketchOn('XY')
 |> close(%)
 |> extrude(h, %)
 "#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(
@@ -1988,9 +1908,7 @@ firstExtrude = startSketchOn('XY')
 #[tokio::test(flavor = "multi_thread")]
 async fn test_recast_math_start_negative() {
 let some_program_string = r#"myVar = -5 + 6"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string);
@@ -2007,9 +1925,7 @@ startSketchOn('XY')
 |> line([0, -(5 - thickness)], %)
 |> line([0, -(5 - 1)], %)
 |> line([0, -(-5 - 1)], %)"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string);
@@ -2023,9 +1939,7 @@ FOS = 2
 sigmaAllow = 8
 width = 20
 thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string);
@@ -2034,9 +1948,7 @@ thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
 #[tokio::test(flavor = "multi_thread")]
 async fn no_vardec_keyword() {
 let some_program_string = r#"distance = 5"#;
-let tokens = crate::token::lexer(some_program_string).unwrap();
-let parser = crate::parser::Parser::new(tokens);
-let program = parser.ast().unwrap();
+let program = crate::parser::top_level_parse(some_program_string).unwrap();

 let recasted = program.recast(&Default::default(), 0);
 assert_eq!(recasted.trim(), some_program_string);
@@ -2066,7 +1978,7 @@ thickness = sqrt(distance * p * FOS * 6 / (sigmaAllow * width))"#;
 .into_iter()
 .enumerate()
 {
-let tokens = crate::token::lexer(raw).unwrap();
+let tokens = crate::token::lexer(raw, ModuleId::default()).unwrap();
 let literal = crate::parser::parser_impl::unsigned_number_literal
 .parse(&tokens)
 .unwrap();
@@ -2099,9 +2011,7 @@ sketch002 = startSketchOn({
 }
 })
 "#;
-let tokens = crate::token::lexer(input).unwrap();
-let p = crate::parser::Parser::new(tokens);
-let ast = p.ast().unwrap();
+let ast = crate::parser::top_level_parse(input).unwrap();
 let actual = ast.recast(&FormatOptions::new(), 0);
 assert_eq!(actual, expected);
 }
@@ -2127,7 +2037,7 @@ sketch002 = startSketchOn({
 .into_iter()
 .enumerate()
 {
-let tokens = crate::token::lexer(input).unwrap();
+let tokens = crate::token::lexer(input, ModuleId::default()).unwrap();
 crate::parser::parser_impl::print_tokens(&tokens);
 let expr = crate::parser::parser_impl::object.parse(&tokens).unwrap();
 assert_eq!(
@@ -2225,7 +2135,7 @@ sketch002 = startSketchOn({
 .into_iter()
 .enumerate()
 {
-let tokens = crate::token::lexer(input).unwrap();
+let tokens = crate::token::lexer(input, ModuleId::default()).unwrap();
 let expr = crate::parser::parser_impl::array_elem_by_elem.parse(&tokens).unwrap();
 assert_eq!(
 expr.recast(&FormatOptions::new(), 0, false),
@@ -42,30 +42,30 @@ pub enum Node<'a> {
 impl From<&Node<'_>> for SourceRange {
 fn from(node: &Node) -> Self {
 match node {
-Node::Program(p) => SourceRange([p.start, p.end]),
-Node::ImportStatement(e) => SourceRange([e.start, e.end]),
-Node::ExpressionStatement(e) => SourceRange([e.start, e.end]),
-Node::VariableDeclaration(v) => SourceRange([v.start, v.end]),
-Node::ReturnStatement(r) => SourceRange([r.start, r.end]),
-Node::VariableDeclarator(v) => SourceRange([v.start, v.end]),
-Node::Literal(l) => SourceRange([l.start, l.end]),
-Node::TagDeclarator(t) => SourceRange([t.start, t.end]),
-Node::Identifier(i) => SourceRange([i.start, i.end]),
-Node::BinaryExpression(b) => SourceRange([b.start, b.end]),
-Node::FunctionExpression(f) => SourceRange([f.start, f.end]),
-Node::CallExpression(c) => SourceRange([c.start, c.end]),
-Node::PipeExpression(p) => SourceRange([p.start, p.end]),
-Node::PipeSubstitution(p) => SourceRange([p.start, p.end]),
-Node::ArrayExpression(a) => SourceRange([a.start, a.end]),
-Node::ArrayRangeExpression(a) => SourceRange([a.start, a.end]),
-Node::ObjectExpression(o) => SourceRange([o.start, o.end]),
-Node::MemberExpression(m) => SourceRange([m.start, m.end]),
-Node::UnaryExpression(u) => SourceRange([u.start, u.end]),
-Node::Parameter(p) => SourceRange([p.identifier.start, p.identifier.end]),
-Node::ObjectProperty(o) => SourceRange([o.start, o.end]),
-Node::MemberObject(m) => SourceRange([m.start(), m.end()]),
-Node::IfExpression(m) => SourceRange([m.start, m.end]),
-Node::LiteralIdentifier(l) => SourceRange([l.start(), l.end()]),
+Node::Program(n) => SourceRange::from(*n),
+Node::ImportStatement(n) => SourceRange::from(*n),
+Node::ExpressionStatement(n) => SourceRange::from(*n),
+Node::VariableDeclaration(n) => SourceRange::from(*n),
+Node::ReturnStatement(n) => SourceRange::from(*n),
+Node::VariableDeclarator(n) => SourceRange::from(*n),
+Node::Literal(n) => SourceRange::from(*n),
+Node::TagDeclarator(n) => SourceRange::from(*n),
+Node::Identifier(n) => SourceRange::from(*n),
+Node::BinaryExpression(n) => SourceRange::from(*n),
+Node::FunctionExpression(n) => SourceRange::from(*n),
+Node::CallExpression(n) => SourceRange::from(*n),
+Node::PipeExpression(n) => SourceRange::from(*n),
+Node::PipeSubstitution(n) => SourceRange::from(*n),
+Node::ArrayExpression(n) => SourceRange::from(*n),
+Node::ArrayRangeExpression(n) => SourceRange::from(*n),
+Node::ObjectExpression(n) => SourceRange::from(*n),
+Node::MemberExpression(n) => SourceRange::from(*n),
+Node::UnaryExpression(n) => SourceRange::from(*n),
+Node::Parameter(p) => SourceRange::from(&p.identifier),
+Node::ObjectProperty(n) => SourceRange::from(*n),
+Node::MemberObject(m) => SourceRange([m.start(), m.end(), m.module_id().as_usize()]),
+Node::IfExpression(n) => SourceRange::from(*n),
+Node::LiteralIdentifier(l) => SourceRange([l.start(), l.end(), l.module_id().as_usize()]),
 }
 }
 }
@@ -315,9 +315,7 @@ mod tests {

 macro_rules! kcl {
 ( $kcl:expr ) => {{
-let tokens = $crate::token::lexer($kcl).unwrap();
-let parser = $crate::parser::Parser::new(tokens);
-parser.ast().unwrap()
+$crate::parser::top_level_parse($kcl).unwrap()
 }};
 }

@@ -123,7 +123,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 7,
-32
+32,
+0
 ]
 }
 ]
@@ -136,7 +137,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 38,
-834
+834,
+0
 ]
 }
 ]
@@ -43,7 +43,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 6,
-15
+15,
+0
 ]
 }
 ]
@@ -61,7 +62,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 27,
-39
+39,
+0
 ]
 }
 ]
@@ -80,7 +82,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 51,
-68
+68,
+0
 ]
 }
 ]
@@ -39,7 +39,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 175,
-188
+188,
+0
 ]
 }
 ]
@@ -52,7 +53,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 79,
-80
+80,
+0
 ]
 }
 ]
@@ -71,7 +73,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 5,
-11
+11,
+0
 ]
 }
 ]
@@ -90,7 +93,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 95,
-107
+107,
+0
 ]
 }
 ]
@@ -110,7 +114,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 194,
-206
+206,
+0
 ]
 }
 ]
@@ -128,7 +133,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 341,
-373
+373,
+0
 ]
 }
 ]
@@ -141,7 +147,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 88,
-89
+89,
+0
 ]
 }
 ]
@@ -51,7 +51,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 5,
-19
+19,
+0
 ]
 }
 ]
@@ -751,7 +751,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 10,
-316
+316,
+0
 ]
 }
 ]
@@ -766,7 +767,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 206,
-219
+219,
+0
 ],
 "tag": null,
 "type": "extrudePlane"
@@ -776,7 +778,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 225,
-238
+238,
+0
 ],
 "tag": null,
 "type": "extrudePlane"
@@ -786,7 +789,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 244,
-257
+257,
+0
 ],
 "tag": null,
 "type": "extrudePlane"
@@ -796,7 +800,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 263,
-276
+276,
+0
 ],
 "tag": null,
 "type": "extrudePlane"
@@ -811,7 +816,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 206,
-219
+219,
+0
 ]
 },
 "from": [
@@ -830,7 +836,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 225,
-238
+238,
+0
 ]
 },
 "from": [
@@ -849,7 +856,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 244,
-257
+257,
+0
 ]
 },
 "from": [
@@ -868,7 +876,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 263,
-276
+276,
+0
 ]
 },
 "from": [
@@ -887,7 +896,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 282,
-290
+290,
+0
 ]
 },
 "from": [
@@ -942,7 +952,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 183,
-200
+200,
+0
 ]
 }
 },
@@ -950,7 +961,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 183,
-200
+200,
+0
 ]
 }
 ]
@@ -962,7 +974,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 183,
-200
+200,
+0
 ]
 }
 ]
@@ -1,5 +1,5 @@
 ---
-source: kcl/src/tests.rs
+source: kcl/src/simulation_tests.rs
 description: Result of tokenizing cube.kcl
 snapshot_kind: text
 ---
@@ -123,7 +123,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 15,
-40
+40,
+0
 ]
 }
 ]
@@ -140,7 +141,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 47,
-53
+53,
+0
 ]
 }
 ]
@@ -157,7 +159,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 90,
-107
+107,
+0
 ]
 }
 ]
@@ -41,7 +41,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 35,
-76
+76,
+0
 ],
 "tag": null,
 "type": "extrudeArc"
@@ -56,7 +57,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 35,
-76
+76,
+0
 ]
 },
 "ccw": true,
@@ -117,7 +119,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 35,
-76
+76,
+0
 ]
 }
 },
@@ -125,7 +128,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 35,
-76
+76,
+0
 ]
 }
 ]
@@ -137,7 +141,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 35,
-76
+76,
+0
 ]
 }
 ]
@@ -39,7 +39,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 64,
-65
+65,
+0
 ]
 }
 ]
@@ -52,7 +53,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 199,
-200
+200,
+0
 ]
 }
 ]
@@ -65,7 +67,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 332,
-333
+333,
+0
 ]
 }
 ]
@@ -43,7 +43,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 43,
-55
+55,
+0
 ]
 }
 ]
@@ -56,7 +57,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 256,
-266
+266,
+0
 ]
 }
 ]
@@ -69,7 +71,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 93,
-101
+101,
+0
 ]
 }
 ]
@@ -82,7 +85,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 277,
-285
+285,
+0
 ]
 }
 ]
@@ -42,7 +42,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 56,
-74
+74,
+0
 ]
 }
 ]
@@ -60,7 +61,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 529,
-543
+543,
+0
 ]
 }
 ]
@@ -73,7 +75,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 122,
-132
+132,
+0
 ]
 }
 ]
@@ -86,7 +89,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 356,
-362
+362,
+0
 ]
 }
 ]
@@ -99,7 +103,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 553,
-570
+570,
+0
 ]
 }
 ]
@@ -112,7 +117,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 757,
-770
+770,
+0
 ]
 }
 ]
@@ -125,7 +131,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 342,
-347
+347,
+0
 ]
 }
 ]
@@ -329,7 +329,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 10,
-157
+157,
+0
 ]
 }
 ]
@@ -957,7 +958,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 10,
-157
+157,
+0
 ]
 }
 ]
@@ -973,7 +975,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 170,
-369
+369,
+0
 ]
 }
 ]
@@ -986,7 +989,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 52,
-77
+77,
+0
 ]
 }
 ],
@@ -1023,7 +1027,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 83,
-98
+98,
+0
 ]
 },
 "from": [
@@ -1042,7 +1047,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 104,
-119
+119,
+0
 ]
 },
 "from": [
@@ -1061,7 +1067,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 125,
-141
+141,
+0
 ]
 },
 "from": [
@@ -1080,7 +1087,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 147,
-155
+155,
+0
 ]
 },
 "from": [
@@ -1100,7 +1108,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 52,
-77
+77,
+0
 ]
 },
 "from": [
@@ -1119,7 +1128,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 52,
-77
+77,
+0
 ]
 }
 ]
@@ -1134,7 +1144,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 242,
-267
+267,
+0
 ]
 }
 ],
@@ -1171,7 +1182,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 277,
-292
+292,
+0
 ]
 },
 "from": [
@@ -1190,7 +1202,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 302,
-317
+317,
+0
 ]
 },
 "from": [
@@ -1209,7 +1222,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 327,
-343
+343,
+0
 ]
 },
 "from": [
@@ -1228,7 +1242,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 353,
-361
+361,
+0
 ]
 },
 "from": [
@@ -1248,7 +1263,8 @@ snapshot_kind: text
 "id": "[uuid]",
 "sourceRange": [
 242,
-267
+267,
+0
 ]
 },
 "from": [
@@ -1269,7 +1285,8 @@ snapshot_kind: text
 {
 "sourceRange": [
 187,
-367
+367,
+0
 ]
 }
 ]
@@ -8,7 +8,7 @@ use std::{
 use futures::stream::TryStreamExt;
 use gloo_utils::format::JsValueSerdeExt;
 use kcl_lib::{
-ast::types::{Node, Program},
+ast::types::{ModuleId, Node, Program},
 coredump::CoreDump,
 engine::EngineManager,
 executor::ExecutorSettings,
@@ -153,9 +153,11 @@ pub async fn modify_ast_for_sketch_wasm(
 .map_err(|e| format!("{:?}", e))?,
 ));

+let module_id = ModuleId::default();
 let _ = kcl_lib::ast::modify::modify_ast_for_sketch(
 &engine,
 &mut program,
+module_id,
 sketch_name,
 plane,
 uuid::Uuid::parse_str(sketch_id).map_err(|e| e.to_string())?,
@@ -193,7 +195,8 @@ pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
 pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
 console_error_panic_hook::set_once();

-let tokens = kcl_lib::token::lexer(js).map_err(JsError::from)?;
+let module_id = ModuleId::default();
+let tokens = kcl_lib::token::lexer(js, module_id).map_err(JsError::from)?;
 Ok(JsValue::from_serde(&tokens)?)
 }

@@ -201,7 +204,8 @@ pub fn lexer_wasm(js: &str) -> Result<JsValue, JsError> {
 pub fn parse_wasm(js: &str) -> Result<JsValue, String> {
 console_error_panic_hook::set_once();

-let tokens = kcl_lib::token::lexer(js).map_err(String::from)?;
+let module_id = ModuleId::default();
+let tokens = kcl_lib::token::lexer(js, module_id).map_err(String::from)?;
 let parser = kcl_lib::parser::Parser::new(tokens);
 let program = parser.ast().map_err(String::from)?;
 // The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
@ -28,7 +28,7 @@ async fn kcl_test_fillet_duplicate_tags() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([203, 249])], message: "Duplicate tags are not allowed." }"#,
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([203, 249, 0])], message: "Duplicate tags are not allowed." }"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -83,7 +83,7 @@ async fn kcl_test_execute_engine_error_return() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"engine: KclErrorDetails { source_ranges: [SourceRange([216, 229])], message: "Modeling command failed: [ApiError { error_code: BadRequest, message: \"The path is not closed. Solid2D construction requires a closed path!\" }]" }"#,
|
r#"engine: KclErrorDetails { source_ranges: [SourceRange([216, 229, 0])], message: "Modeling command failed: [ApiError { error_code: BadRequest, message: \"The path is not closed. Solid2D construction requires a closed path!\" }]" }"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -515,7 +515,7 @@ async fn kcl_test_import_file_doesnt_exist() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 27])], message: "File `thing.obj` does not exist." }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 27, 0])], message: "File `thing.obj` does not exist." }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -583,7 +583,7 @@ async fn kcl_test_import_ext_doesnt_match() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 76])], message: "The given format does not match the file extension. Expected: `gltf`, Given: `obj`" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([8, 76, 0])], message: "The given format does not match the file extension. Expected: `gltf`, Given: `obj`" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -742,7 +742,7 @@ part002 = startSketchOn(part001, part001.sketch.tags.here)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([88, 133]), SourceRange([210, 226])], message: "could not sketch tangential arc, because its center would be infinitely far away in the X direction" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([88, 133, 0]), SourceRange([210, 226, 0])], message: "could not sketch tangential arc, because its center would be infinitely far away in the X direction" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -799,7 +799,7 @@ async fn kcl_test_stdlib_kcl_error_right_code_path() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([151, 189])], message: "Expected an argument at index 1" }"#,
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([151, 189, 0])], message: "Expected an argument at index 1" }"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -869,7 +869,7 @@ part = rectShape([0, 0], 20, 20)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([863, 912])], message: "Argument at index 0 was supposed to be type kcl_lib::std::shapes::CircleData but found string (text)" }"#,
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([863, 912, 0])], message: "Argument at index 0 was supposed to be type kcl_lib::std::shapes::CircleData but found string (text)" }"#,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -954,7 +954,7 @@ async fn kcl_test_revolve_bad_angle_low() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 308])], message: "Expected angle to be between -360 and 360 and not 0, found `-455`" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 308, 0])], message: "Expected angle to be between -360 and 360 and not 0, found `-455`" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -979,7 +979,7 @@ async fn kcl_test_revolve_bad_angle_high() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 307])], message: "Expected angle to be between -360 and 360 and not 0, found `455`" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([272, 307, 0])], message: "Expected angle to be between -360 and 360 and not 0, found `455`" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1073,7 +1073,7 @@ sketch001 = startSketchOn(box, revolveAxis)
|
|||||||
//this fails right now, but slightly differently, lets just say its enough for it to fail - mike
|
//this fails right now, but slightly differently, lets just say its enough for it to fail - mike
|
||||||
//assert_eq!(
|
//assert_eq!(
|
||||||
// result.err().unwrap().to_string(),
|
// result.err().unwrap().to_string(),
|
||||||
// r#"engine: KclErrorDetails { source_ranges: [SourceRange([346, 390])], message: "Modeling command failed: [ApiError { error_code: InternalEngine, message: \"Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis\" }]" }"#
|
// r#"engine: KclErrorDetails { source_ranges: [SourceRange([346, 390, 0])], message: "Modeling command failed: [ApiError { error_code: InternalEngine, message: \"Solid3D revolve failed: sketch profile must lie entirely on one side of the revolution axis\" }]" }"#
|
||||||
//);
|
//);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1354,7 +1354,7 @@ secondSketch = startSketchOn(part001, '')
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([260, 286])], message: "Argument at index 1 was supposed to be type kcl_lib::std::sketch::FaceTag but found string (text)" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([260, 286, 0])], message: "Argument at index 1 was supposed to be type kcl_lib::std::sketch::FaceTag but found string (text)" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1385,7 +1385,7 @@ extrusion = startSketchOn('XY')
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([68, 334]), SourceRange([428, 461])], message: "Expected 2 arguments, got 3" }"#
|
r#"semantic: KclErrorDetails { source_ranges: [SourceRange([68, 334, 0]), SourceRange([428, 461, 0])], message: "Expected 2 arguments, got 3" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1681,7 +1681,7 @@ part001 = cube([0,0], 20)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([259, 345])], message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag." }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([259, 345, 0])], message: "You can only tag one edge at a time with a tagged chamfer. Either delete the tag for the chamfer fn if you don't need it OR separate into individual chamfer functions for each tag." }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1708,7 +1708,7 @@ let p = triangle(200)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"value already defined: KclErrorDetails { source_ranges: [SourceRange([311, 313]), SourceRange([326, 339])], message: "Cannot redefine `a`" }"#
|
r#"value already defined: KclErrorDetails { source_ranges: [SourceRange([311, 313, 0]), SourceRange([326, 339, 0])], message: "Cannot redefine `a`" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1783,7 +1783,7 @@ async fn kcl_test_arc_error_same_start_end() {
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([57, 140])], message: "Arc start and end angles must be different" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([57, 140, 0])], message: "Arc start and end angles must be different" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1803,7 +1803,7 @@ example = extrude(10, exampleSketch)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 111])], message: "Cannot have an x constrained angle of 90 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 111, 0])], message: "Cannot have an x constrained angle of 90 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1823,7 +1823,7 @@ example = extrude(10, exampleSketch)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112])], message: "Cannot have an x constrained angle of 270 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112, 0])], message: "Cannot have an x constrained angle of 270 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1843,7 +1843,7 @@ example = extrude(10, exampleSketch)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 110])], message: "Cannot have a y constrained angle of 0 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 110, 0])], message: "Cannot have a y constrained angle of 0 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1863,7 +1863,7 @@ example = extrude(10, exampleSketch)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112])], message: "Cannot have a y constrained angle of 180 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([72, 112, 0])], message: "Cannot have a y constrained angle of 180 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1883,7 +1883,7 @@ extrusion = extrude(10, sketch001)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125])], message: "Cannot have an x constrained angle of 90 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125, 0])], message: "Cannot have an x constrained angle of 90 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1903,7 +1903,7 @@ extrusion = extrude(10, sketch001)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125])], message: "Cannot have an x constrained angle of 90 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([68, 125, 0])], message: "Cannot have an x constrained angle of 90 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1925,7 +1925,7 @@ example = extrude(10, exampleSketch)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 142])], message: "Cannot have a y constrained angle of 0 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 142, 0])], message: "Cannot have a y constrained angle of 0 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1947,7 +1947,7 @@ example = extrude(10, exampleSketch)
|
|||||||
assert!(result.is_err());
|
assert!(result.is_err());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
result.err().unwrap().to_string(),
|
result.err().unwrap().to_string(),
|
||||||
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 144])], message: "Cannot have a y constrained angle of 180 degrees" }"#
|
r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 144, 0])], message: "Cannot have a y constrained angle of 180 degrees" }"#
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1969,7 +1969,7 @@ example = extrude(10, exampleSketch)
     assert!(result.is_err());
     assert_eq!(
         result.err().unwrap().to_string(),
-        r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 145])], message: "Cannot have a y constrained angle of 180 degrees" }"#
+        r#"type: KclErrorDetails { source_ranges: [SourceRange([94, 145, 0])], message: "Cannot have a y constrained angle of 180 degrees" }"#
     );
 }

@@ -1986,7 +1986,7 @@ someFunction('INVALID')
     assert!(result.is_err());
     assert_eq!(
         result.err().unwrap().to_string(),
-        r#"semantic: KclErrorDetails { source_ranges: [SourceRange([37, 61]), SourceRange([65, 88])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
+        r#"semantic: KclErrorDetails { source_ranges: [SourceRange([37, 61, 0]), SourceRange([65, 88, 0])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
     );
 }

@@ -2007,7 +2007,7 @@ someFunction('INVALID')
     assert!(result.is_err());
     assert_eq!(
         result.err().unwrap().to_string(),
-        r#"semantic: KclErrorDetails { source_ranges: [SourceRange([89, 114]), SourceRange([126, 155]), SourceRange([159, 182])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
+        r#"semantic: KclErrorDetails { source_ranges: [SourceRange([89, 114, 0]), SourceRange([126, 155, 0]), SourceRange([159, 182, 0])], message: "Argument at index 0 was supposed to be type kcl_lib::std::sketch::SketchData but found string (text)" }"#
     );
 }

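Note: the substantive change in the hunks above is the third element in each serialized `SourceRange`: ranges are now `[start, end, module_id]`, with module ID 0 denoting the top-level file. A minimal sketch of the shape this implies (illustrative only; the real types live in `kcl_lib`, and the integer widths and method names here are assumptions):

```rust
// Hypothetical sketch, not kcl_lib's actual definition: a source range that
// records which module (file) its byte offsets point into.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct ModuleId(pub u32); // width is an assumption

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SourceRange(pub [u32; 3]); // serializes as [start, end, module_id]

impl SourceRange {
    pub fn start(&self) -> u32 {
        self.0[0]
    }
    pub fn end(&self) -> u32 {
        self.0[1]
    }
    pub fn module_id(&self) -> ModuleId {
        ModuleId(self.0[2])
    }
}

fn main() {
    // Mirrors the literals in the tests above, e.g. SourceRange([72, 110, 0]).
    let range = SourceRange([72, 110, 0]);
    assert_eq!(range.module_id(), ModuleId::default()); // 0 = top-level module
    println!("{}..{} in {:?}", range.start(), range.end(), range.module_id());
}
```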

@@ -1,5 +1,5 @@
 use kcl_lib::{
-    ast::types::{Node, Program},
+    ast::types::{ModuleId, Node, Program},
     errors::KclError,
     executor::{ExecutorContext, IdGenerator},
     parser,
@@ -28,7 +28,8 @@ macro_rules! gen_test_parse_fail {
 }

 async fn setup(program: &str) -> (ExecutorContext, Node<Program>, IdGenerator) {
-    let tokens = kcl_lib::token::lexer(program).unwrap();
+    let module_id = ModuleId::default();
+    let tokens = kcl_lib::token::lexer(program, module_id).unwrap();
     let parser = kcl_lib::parser::Parser::new(tokens);
     let program = parser.ast().unwrap();
     let ctx = kcl_lib::executor::ExecutorContext {
@@ -60,7 +61,7 @@ async fn run_fail(code: &str) -> KclError {
 }

 async fn run_parse_fail(code: &str) -> KclError {
-    let Err(e) = parser::parse(code) else {
+    let Err(e) = parser::top_level_parse(code) else {
         panic!("Expected this KCL program to fail to parse, but it (incorrectly) never threw an error.");
     };
     e
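The helper above shows the new front-door pattern: the lexer is handed a `ModuleId` up front, so every token (and every source range derived from it) knows which file it came from. A condensed sketch of the same flow, assuming the `kcl_lib` paths shown in this diff:

```rust
// Condensed from the test helper above; assumes the kcl_lib paths in this diff.
use kcl_lib::ast::types::ModuleId;

fn parse_single_file(program: &str) {
    // ModuleId::default() is module 0, i.e. the top-level file.
    let module_id = ModuleId::default();
    let tokens = kcl_lib::token::lexer(program, module_id).unwrap();
    let parser = kcl_lib::parser::Parser::new(tokens);
    let _ast = parser.ast().unwrap();
    // For the single-file case, kcl_lib::parser::top_level_parse(program)
    // appears to be the shorthand this diff introduces for callers that
    // don't track module IDs themselves.
}
```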

@@ -2,7 +2,7 @@ use anyhow::Result;
 use kcl_lib::{
     ast::{
         modify::modify_ast_for_sketch,
-        types::{Node, Program},
+        types::{ModuleId, Node, Program},
     },
     executor::{ExecutorContext, IdGenerator, KclValue, PlaneType, Sketch, SourceRange},
 };
@@ -10,10 +10,9 @@ use kittycad_modeling_cmds::{each_cmd as mcmd, length_unit::LengthUnit, shared::
 use pretty_assertions::assert_eq;

 /// Setup the engine and parse code for an ast.
-async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Node<Program>, uuid::Uuid)> {
-    let tokens = kcl_lib::token::lexer(code)?;
-    let parser = kcl_lib::parser::Parser::new(tokens);
-    let program = parser.ast()?;
+async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Node<Program>, ModuleId, uuid::Uuid)> {
+    let module_id = ModuleId::default();
+    let program = kcl_lib::parser::parse(code, module_id)?;
     let ctx = kcl_lib::executor::ExecutorContext::new_with_default_client(Default::default()).await?;
     let exec_state = ctx.run(&program, None, IdGenerator::default(), None).await?;

@@ -60,7 +59,7 @@ async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Node<Program>
     )
     .await?;

-    Ok((ctx, program, sketch_id))
+    Ok((ctx, program, module_id, sketch_id))
 }

 #[tokio::test(flavor = "multi_thread")]
@@ -76,9 +75,9 @@ async fn kcl_test_modify_sketch_part001() {
         name
     );

-    let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
+    let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
     let mut new_program = program.clone();
-    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
+    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
         .await
         .unwrap();

@@ -101,9 +100,9 @@ async fn kcl_test_modify_sketch_part002() {
         name
     );

-    let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
+    let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
     let mut new_program = program.clone();
-    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
+    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
         .await
         .unwrap();

@@ -128,9 +127,9 @@ async fn kcl_test_modify_close_sketch() {
         name
     );

-    let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
+    let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
     let mut new_program = program.clone();
-    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
+    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
         .await
         .unwrap();

@@ -154,9 +153,9 @@ async fn kcl_test_modify_line_to_close_sketch() {
         name
     );

-    let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
+    let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
     let mut new_program = program.clone();
-    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
+    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
         .await
         .unwrap();

@@ -191,14 +190,14 @@ const {} = startSketchOn("XY")
         name
     );

-    let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
+    let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
     let mut new_program = program.clone();
-    let result = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id).await;
+    let result = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id).await;

     assert!(result.is_err());
     assert_eq!(
         result.unwrap_err().to_string(),
-        r#"engine: KclErrorDetails { source_ranges: [SourceRange([188, 193])], message: "Sketch part002 is constrained `partial` and cannot be modified" }"#
+        r#"engine: KclErrorDetails { source_ranges: [SourceRange([188, 193, 0])], message: "Sketch part002 is constrained `partial` and cannot be modified" }"#
     );
 }

@@ -216,9 +215,9 @@ async fn kcl_test_modify_line_should_close_sketch() {
         name
     );

-    let (ctx, program, sketch_id) = setup(&code, name).await.unwrap();
+    let (ctx, program, module_id, sketch_id) = setup(&code, name).await.unwrap();
     let mut new_program = program.clone();
-    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, name, PlaneType::XY, sketch_id)
+    let new_code = modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
         .await
         .unwrap();

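Taken together, these test updates show the new calling convention for sketch modification: `setup` now also returns the `ModuleId` it parsed with, and `modify_ast_for_sketch` takes it so the rewritten source ranges stay pinned to the right file. A condensed sketch, relying on the `setup` helper defined in this diff (the exact return types are assumptions):

```rust
// Fragment mirroring the tests above; `setup` is the helper from this diff,
// and modify_ast_for_sketch is assumed to return the regenerated source code.
async fn modify_example(code: &str, name: &str) -> String {
    let (ctx, program, module_id, sketch_id) = setup(code, name).await.unwrap();
    let mut new_program = program.clone();
    // module_id tells the modifier which file the sketch's ranges belong to.
    modify_ast_for_sketch(&ctx.engine, &mut new_program, module_id, name, PlaneType::XY, sketch_id)
        .await
        .unwrap()
}
```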