Add non-code metadata to the ast (#15)

* Revert "wire up comments to ui (#11)"

This reverts commit bdf778530f.

* Revert "Add the ability to recast comments and some whitespace (#10)"

This reverts commit dd5022b38e.

* ast metadata

* clean up
Author: Kurt Hutten
Date: 2023-02-01 07:30:55 +11:00
Committed by: GitHub
Parent: bdf778530f
Commit: 29e06ec852
11 changed files with 433 additions and 653 deletions
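In short: instead of threading the raw token stream through the store, the UI, and recast, the AST now carries its own record of comments and whitespace. The rough shape, adapted from the NoneCodeNode/NoneCodeMeta interfaces added in this diff (the example value is illustrative only):

interface NoneCodeNode {
  type: 'NoneCodeNode'
  start: number
  end: number
  value: string // raw comment/whitespace text as it appeared in the source
}

interface NoneCodeMeta {
  // non-code text that follows the body item at this index
  [statementIndex: number]: NoneCodeNode
  // non-code text before the first item of a file or block
  start?: NoneCodeNode
}

// Illustrative value for `const a = 1\n// hi\nconst b = 2` (offsets approximate):
const exampleMeta: NoneCodeMeta = {
  0: { type: 'NoneCodeNode', start: 11, end: 18, value: '\n// hi\n' },
}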

View File

@@ -42,7 +42,6 @@ function App() {
setError,
errorState,
setProgramMemory,
- tokens,
} = useStore((s) => ({
editorView: s.editorView,
setEditorView: s.setEditorView,
@@ -61,7 +60,6 @@ function App() {
setError: s.setError,
errorState: s.errorState,
setProgramMemory: s.setProgramMemory,
- tokens: s.tokens,
}))
// const onChange = React.useCallback((value: string, viewUpdate: ViewUpdate) => {
const onChange = (value: string, viewUpdate: ViewUpdate) => {
@@ -91,7 +89,7 @@ function App() {
}
const tokens = lexer(code)
const _ast = abstractSyntaxTree(tokens)
- setAst(_ast, tokens)
+ setAst(_ast)
const programMemory = executor(_ast, {
root: {
log: {
@@ -144,7 +142,7 @@ function App() {
}, [code])
const shouldFormat = useMemo(() => {
if (!ast) return false
- const recastedCode = recast(ast, tokens)
+ const recastedCode = recast(ast)
return recastedCode !== code
}, [code, ast])
return (

View File

@@ -52,6 +52,7 @@ export const BasePlanes = () => {
start: 0,
end: 0,
body: [],
+ nonCodeMeta: {},
}
const axis = axisIndex === 0 ? 'xy' : axisIndex === 1 ? 'xz' : 'yz'
const quaternion = new Quaternion()

View File

@@ -62,6 +62,7 @@ export const SketchPlane = () => {
start: 0,
end: 0,
body: [],
+ nonCodeMeta: {},
}
const addLinePoint: [number, number] = [point.x, point.y]
const { modifiedAst } = addLine(

View File

@@ -29,6 +29,7 @@ describe('testing AST', () => {
test('test 5 + 6', () => {
const tokens = lexer('5 +6')
const result = abstractSyntaxTree(tokens)
+ delete (result as any).nonCodeMeta
expect(result).toEqual({
type: 'Program',
start: 0,
@@ -219,6 +220,7 @@ describe('testing function declaration', () => {
test('fn funcN = () => {}', () => {
const tokens = lexer('fn funcN = () => {}')
const { body } = abstractSyntaxTree(tokens)
+ delete (body[0] as any).declarations[0].init.body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
@@ -259,6 +261,7 @@ describe('testing function declaration', () => {
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
)
const { body } = abstractSyntaxTree(tokens)
+ delete (body[0] as any).declarations[0].init.body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
@@ -337,6 +340,7 @@ describe('testing function declaration', () => {
const myVar = funcN(1, 2)`
)
const { body } = abstractSyntaxTree(tokens)
+ delete (body[0] as any).declarations[0].init.body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
@@ -469,6 +473,7 @@ describe('structures specific to this lang', () => {
`
const tokens = lexer(code)
const { body } = abstractSyntaxTree(tokens)
+ delete (body[0] as any).declarations[0].init.body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
@@ -657,7 +662,9 @@ describe('testing hasPipeOperator', () => {
`
const tokens = lexer(code)
- expect(hasPipeOperator(tokens, 0)).toEqual({
+ const result = hasPipeOperator(tokens, 0)
+ delete (result as any).bonusNonCodeNode
+ expect(result).toEqual({
index: 16,
token: { end: 37, start: 35, type: 'operator', value: '|>' },
})
@@ -669,6 +676,7 @@ describe('testing hasPipeOperator', () => {
`
const tokens = lexer(code)
const result = hasPipeOperator(tokens, 0)
+ delete (result as any).bonusNonCodeNode
expect(result).toEqual({
index: 16,
token: { end: 37, start: 35, type: 'operator', value: '|>' },
@@ -690,6 +698,7 @@ const yo = myFunc(9()
let code = `const myVar2 = 5 + 1 |> myFn(%)`
const tokens = lexer(code)
const result = hasPipeOperator(tokens, 1)
+ delete (result as any).bonusNonCodeNode
expect(result).toEqual({
index: 12,
token: { end: 23, start: 21, type: 'operator', value: '|>' },
@@ -718,6 +727,7 @@ const yo = myFunc(9()
const braceTokenIndex = tokens.findIndex(({ value }) => value === '{')
const result2 = hasPipeOperator(tokens, braceTokenIndex)
+ delete (result2 as any).bonusNonCodeNode
expect(result2).toEqual({
index: 36,
token: { end: 76, start: 74, type: 'operator', value: '|>' },
@@ -737,6 +747,8 @@ describe('testing pipe operator special', () => {
`
const tokens = lexer(code)
const { body } = abstractSyntaxTree(tokens)
+ delete (body[0] as any).declarations[0].init.nonCodeMeta
+ delete (body[0] as any).declarations[0].init.body[0].body.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
@@ -921,6 +933,7 @@ describe('testing pipe operator special', () => {
let code = `const myVar = 5 + 6 |> myFunc(45, %)`
const tokens = lexer(code)
const { body } = abstractSyntaxTree(tokens)
+ delete (body as any)[0].declarations[0].init.nonCodeMeta
expect(body).toEqual([
{
type: 'VariableDeclaration',
@@ -1804,6 +1817,76 @@ describe('nests binary expressions correctly', () => {
})
})
describe('check nonCodeMeta data is attached to the AST correctly', () => {
it('comments between expressions', () => {
const code = `
const yo = { a: { b: { c: '123' } } }
// this is a comment
const key = 'c'`
const nonCodeMetaInstance = {
type: 'NoneCodeNode',
start: code.indexOf('\n// this is a comment'),
end: code.indexOf('const key'),
value: '\n// this is a comment\n',
}
const { nonCodeMeta } = abstractSyntaxTree(lexer(code))
expect(nonCodeMeta[0]).toEqual(nonCodeMetaInstance)
// extra whitespace won't change it's position (0) or value (NB the start end would have changed though)
const codeWithExtraStartWhitespace = '\n\n\n' + code
const { nonCodeMeta: nonCodeMeta2 } = abstractSyntaxTree(
lexer(codeWithExtraStartWhitespace)
)
expect(nonCodeMeta2[0].value).toBe(nonCodeMetaInstance.value)
expect(nonCodeMeta2[0].start).not.toBe(nonCodeMetaInstance.start)
})
it('comments nested within a block statement', () => {
const code = `sketch mySketch {
path myPath = lineTo(0,1)
lineTo(1,1) /* this is
a comment
spanning a few lines */
path rightPath = lineTo(1,0)
close()
}
`
const { body } = abstractSyntaxTree(lexer(code))
const indexOfSecondLineToExpression = 1 // 0 index so `path myPath = lineTo(0,1)` is 0
const sketchNonCodeMeta = (body as any)[0].declarations[0].init.body
.nonCodeMeta
expect(sketchNonCodeMeta[indexOfSecondLineToExpression]).toEqual({
type: 'NoneCodeNode',
start: 67,
end: 133,
value:
' /* this is \n a comment \n spanning a few lines */\n ',
})
})
it('comments in a pipe expression', () => {
const code = [
'sketch mySk1 {',
' lineTo(1, 1)',
' path myPath = lineTo(0, 1)',
' lineTo(1, 1)',
'}',
'// a comment',
' |> rx(90, %)',
].join('\n')
const { body } = abstractSyntaxTree(lexer(code))
const bing = abstractSyntaxTree(lexer(code))
const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta
expect(1).toBe(1)
expect(sketchNonCodeMeta[0]).toEqual({
type: 'NoneCodeNode',
start: 75,
end: 91,
value: '\n// a comment\n ',
})
})
})
describe('testing findEndofBinaryExpression', () => {
it('1 + 2 * 3', () => {
const code = `1 + 2 * 3\nconst yo = 5`
@@ -1853,91 +1936,3 @@ describe('testing findEndofBinaryExpression', () => {
expect(end).toBe(code.indexOf('))') + 1)
})
})
describe('testing code with comments', () => {
it('should ignore line comments', () => {
const comment = '// this is a comment'
const codeWithComment = `const yo = 5
${comment}
const yo2 = 6`
// filling with extra whitespace to make the source start end numbers match
const codeWithoutComment = `const yo = 5
${comment
.split('')
.map(() => ' ')
.join('')}
const yo2 = 6`
const { body } = abstractSyntaxTree(lexer(codeWithComment))
const { body: bodyWithoutComment } = abstractSyntaxTree(
lexer(codeWithoutComment)
)
expect(body).toEqual(bodyWithoutComment)
})
it('should ignore block comments', () => {
const comment = `/* this is a
multi line
comment */`
const codeWithComment = `const yo = 5${comment}
const yo2 = 6`
// filling with extra whitespace to make the source start end numbers match
const codeWithoutComment = `const yo = 5${comment
.split('')
.map(() => ' ')
.join('')}
const yo2 = 6`
const { body } = abstractSyntaxTree(lexer(codeWithComment))
const { body: bodyWithoutComment } = abstractSyntaxTree(
lexer(codeWithoutComment)
)
expect(body).toEqual(bodyWithoutComment)
})
it('comment in function declaration', () => {
const code = `const yo=(a)=>{
// this is a comment
return a
}`
const { body } = abstractSyntaxTree(lexer(code))
const yo = [
{
type: 'VariableDeclaration',
start: 0,
end: 51,
kind: 'const',
declarations: [
{
type: 'VariableDeclarator',
start: 6,
end: 51,
id: { type: 'Identifier', start: 6, end: 8, name: 'yo' },
init: {
type: 'FunctionExpression',
start: 9,
end: 51,
id: null,
params: [{ type: 'Identifier', start: 10, end: 11, name: 'a' }],
body: {
type: 'BlockStatement',
start: 14,
end: 51,
body: [
{
type: 'ReturnStatement',
start: 41,
end: 49,
argument: {
type: 'Identifier',
start: 48,
end: 49,
name: 'a',
},
},
],
},
},
},
],
},
]
expect(body).toEqual(yo)
})
})

View File

@@ -21,6 +21,7 @@ type syntaxType =
| 'PipeExpression'
| 'PipeSubstitution'
| 'Literal'
+ | 'NoneCodeNode'
// | 'NumberLiteral'
// | 'StringLiteral'
// | 'IfStatement'
@@ -84,6 +85,7 @@ export interface Program {
start: number
end: number
body: BodyItem[]
+ nonCodeMeta: NoneCodeMeta
}
interface GeneralStatement {
type: syntaxType
@@ -91,6 +93,44 @@ interface GeneralStatement {
end: number
}
+ interface NoneCodeNode extends GeneralStatement {
+ type: 'NoneCodeNode'
+ value: string
+ }
+ interface NoneCodeMeta {
+ // Stores the whitespace/comments that go after the statement who's index we're using here
+ [statementIndex: number]: NoneCodeNode
+ // Which is why we also need `start` for and whitespace at the start of the file/block
+ start?: NoneCodeNode
+ }
+ function makeNoneCodeNode(
+ tokens: Token[],
+ index: number
+ ): { node?: NoneCodeNode; lastIndex: number } {
+ const currentToken = tokens[index]
+ const endIndex = findEndOfNonCodeNode(tokens, index)
+ const nonCodeTokens = tokens.slice(index, endIndex)
+ let value = nonCodeTokens.map((t) => t.value).join('')
+ const node: NoneCodeNode = {
+ type: 'NoneCodeNode',
+ start: currentToken.start,
+ end: tokens[endIndex - 1].end,
+ value,
+ }
+ return { node, lastIndex: endIndex - 1 }
+ }
+ export function findEndOfNonCodeNode(tokens: Token[], index: number): number {
+ const currentToken = tokens[index]
+ if (isNotCodeToken(currentToken)) {
+ return findEndOfNonCodeNode(tokens, index + 1)
+ }
+ return index
+ }
export interface ExpressionStatement extends GeneralStatement {
type: 'ExpressionStatement'
expression: Value
@@ -828,6 +868,7 @@ function makeSketchExpression(
export interface PipeExpression extends GeneralStatement {
type: 'PipeExpression'
body: Value[]
+ nonCodeMeta: NoneCodeMeta
}
function makePipeExpression(
@@ -835,7 +876,11 @@
index: number
): { expression: PipeExpression; lastIndex: number } {
const currentToken = tokens[index]
- const { body, lastIndex: bodyLastIndex } = makePipeBody(tokens, index)
+ const {
+ body,
+ lastIndex: bodyLastIndex,
+ nonCodeMeta,
+ } = makePipeBody(tokens, index)
const endToken = tokens[bodyLastIndex]
return {
expression: {
@@ -843,6 +888,7 @@ function makePipeExpression(
start: currentToken.start,
end: endToken.end,
body,
+ nonCodeMeta,
},
lastIndex: bodyLastIndex,
}
@@ -851,8 +897,10 @@ function makePipeExpression(
function makePipeBody(
tokens: Token[],
index: number,
- previousValues: Value[] = []
- ): { body: Value[]; lastIndex: number } {
+ previousValues: Value[] = [],
+ previousNonCodeMeta: NoneCodeMeta = {}
+ ): { body: Value[]; lastIndex: number; nonCodeMeta: NoneCodeMeta } {
+ const nonCodeMeta = { ...previousNonCodeMeta }
const currentToken = tokens[index]
const expressionStart = nextMeaningfulToken(tokens, index)
let value: Value
@@ -874,10 +922,18 @@ function makePipeBody(
return {
body: [...previousValues, value],
lastIndex,
+ nonCodeMeta,
}
}
- // const nextToken = nextMeaningfulToken(tokens, nextPipeToken.index + 1)
- return makePipeBody(tokens, nextPipeToken.index, [...previousValues, value])
+ if (nextPipeToken.bonusNonCodeNode) {
+ nonCodeMeta[previousValues.length] = nextPipeToken.bonusNonCodeNode
+ }
+ return makePipeBody(
+ tokens,
+ nextPipeToken.index,
+ [...previousValues, value],
+ nonCodeMeta
+ )
}
export interface FunctionExpression extends GeneralStatement {
@@ -938,6 +994,7 @@ function makeParams(
export interface BlockStatement extends GeneralStatement {
type: 'BlockStatement'
body: BodyItem[]
+ nonCodeMeta: NoneCodeMeta
}
function makeBlockStatement(
@@ -945,10 +1002,10 @@
index: number
): { block: BlockStatement; lastIndex: number } {
const openingCurly = tokens[index]
- const nextToken = nextMeaningfulToken(tokens, index)
- const { body, lastIndex } =
+ const nextToken = { token: tokens[index + 1], index: index + 1 }
+ const { body, lastIndex, nonCodeMeta } =
nextToken.token.value === '}'
- ? { body: [], lastIndex: nextToken.index }
+ ? { body: [], lastIndex: nextToken.index, nonCodeMeta: {} }
: makeBody({ tokens, tokenIndex: nextToken.index })
return {
block: {
@@ -956,6 +1013,7 @@
start: openingCurly.start,
end: tokens[lastIndex]?.end || 0,
body,
+ nonCodeMeta,
},
lastIndex,
}
@@ -986,18 +1044,24 @@ function makeReturnStatement(
export type All = Program | ExpressionStatement[] | BinaryExpression | Literal
- function nextMeaningfulToken(
+ export function nextMeaningfulToken(
tokens: Token[],
index: number,
offset: number = 1
- ): { token: Token; index: number } {
+ ): { token: Token; index: number; bonusNonCodeNode?: NoneCodeNode } {
const newIndex = index + offset
const token = tokens[newIndex]
if (!token) {
return { token, index: tokens.length }
}
if (isNotCodeToken(token)) {
- return nextMeaningfulToken(tokens, index, offset + 1)
+ const nonCodeNode = makeNoneCodeNode(tokens, newIndex)
+ const newnewIndex = nonCodeNode.lastIndex + 1
+ return {
+ token: tokens[newnewIndex],
+ index: newnewIndex,
+ bonusNonCodeNode: nonCodeNode?.node?.value ? nonCodeNode.node : undefined,
+ }
}
return { token, index: newIndex }
}
@@ -1018,10 +1082,7 @@ function previousMeaningfulToken(
return { token, index: newIndex }
}
- export type BodyItem =
- | ExpressionStatement
- | VariableDeclaration
- | ReturnStatement
+ type BodyItem = ExpressionStatement | VariableDeclaration | ReturnStatement
function makeBody(
{
@@ -1031,23 +1092,37 @@ function makeBody(
tokens: Token[]
tokenIndex?: number
},
- previousBody: BodyItem[] = []
- ): { body: BodyItem[]; lastIndex: number } {
+ previousBody: BodyItem[] = [],
+ previousNonCodeMeta: NoneCodeMeta = {}
+ ): { body: BodyItem[]; lastIndex: number; nonCodeMeta: NoneCodeMeta } {
+ const nonCodeMeta = { ...previousNonCodeMeta }
if (tokenIndex >= tokens.length) {
- return { body: previousBody, lastIndex: tokenIndex }
+ return { body: previousBody, lastIndex: tokenIndex, nonCodeMeta }
}
const token = tokens[tokenIndex]
if (token.type === 'brace' && token.value === '}') {
- return { body: previousBody, lastIndex: tokenIndex }
+ return { body: previousBody, lastIndex: tokenIndex, nonCodeMeta }
}
+ if (typeof token === 'undefined') {
+ console.log('probably should throw')
+ }
if (isNotCodeToken(token)) {
- return makeBody({ tokens, tokenIndex: tokenIndex + 1 }, previousBody)
+ const nextToken = nextMeaningfulToken(tokens, tokenIndex, 0)
+ if (nextToken.bonusNonCodeNode) {
+ if (previousBody.length === 0) {
+ nonCodeMeta.start = nextToken.bonusNonCodeNode
+ } else {
+ nonCodeMeta[previousBody.length] = nextToken.bonusNonCodeNode
+ }
+ }
+ return makeBody(
+ { tokens, tokenIndex: nextToken.index },
+ previousBody,
+ nonCodeMeta
+ )
}
const nextToken = nextMeaningfulToken(tokens, tokenIndex)
+ nextToken.bonusNonCodeNode &&
+ (nonCodeMeta[previousBody.length] = nextToken.bonusNonCodeNode)
if (
token.type === 'word' &&
(token.value === 'const' ||
@@ -1060,18 +1135,26 @@ function makeBody(
tokenIndex
)
const nextThing = nextMeaningfulToken(tokens, lastIndex)
- return makeBody({ tokens, tokenIndex: nextThing.index }, [
- ...previousBody,
- declaration,
- ])
+ nextThing.bonusNonCodeNode &&
+ (nonCodeMeta[previousBody.length] = nextThing.bonusNonCodeNode)
+ return makeBody(
+ { tokens, tokenIndex: nextThing.index },
+ [...previousBody, declaration],
+ nonCodeMeta
+ )
}
if (token.type === 'word' && token.value === 'return') {
const { statement, lastIndex } = makeReturnStatement(tokens, tokenIndex)
const nextThing = nextMeaningfulToken(tokens, lastIndex)
- return makeBody({ tokens, tokenIndex: nextThing.index }, [
- ...previousBody,
- statement,
- ])
+ nextThing.bonusNonCodeNode &&
+ (nonCodeMeta[previousBody.length] = nextThing.bonusNonCodeNode)
+ return makeBody(
+ { tokens, tokenIndex: nextThing.index },
+ [...previousBody, statement],
+ nonCodeMeta
+ )
}
if (
token.type === 'word' &&
@@ -1083,31 +1166,44 @@
tokenIndex
)
const nextThing = nextMeaningfulToken(tokens, lastIndex)
- return makeBody({ tokens, tokenIndex: nextThing.index }, [
- ...previousBody,
- expression,
- ])
+ if (nextThing.bonusNonCodeNode) {
+ nonCodeMeta[previousBody.length] = nextThing.bonusNonCodeNode
+ }
+ return makeBody(
+ { tokens, tokenIndex: nextThing.index },
+ [...previousBody, expression],
+ nonCodeMeta
+ )
}
+ const nextThing = nextMeaningfulToken(tokens, tokenIndex)
if (
(token.type === 'number' || token.type === 'word') &&
- nextMeaningfulToken(tokens, tokenIndex).token.type === 'operator'
+ nextThing.token.type === 'operator'
) {
+ if (nextThing.bonusNonCodeNode) {
+ nonCodeMeta[previousBody.length] = nextThing.bonusNonCodeNode
+ }
const { expression, lastIndex } = makeExpressionStatement(
tokens,
tokenIndex
)
- // return startTree(tokens, tokenIndex, [...previousBody, makeExpressionStatement(tokens, tokenIndex)]);
- return { body: [...previousBody, expression], lastIndex }
+ return {
+ body: [...previousBody, expression],
+ nonCodeMeta: nonCodeMeta,
+ lastIndex,
+ }
}
throw new Error('Unexpected token')
}
export const abstractSyntaxTree = (tokens: Token[]): Program => {
- const { body } = makeBody({ tokens })
+ const { body, nonCodeMeta } = makeBody({ tokens })
const program: Program = {
type: 'Program',
start: 0,
end: body[body.length - 1].end,
body: body,
+ nonCodeMeta,
}
return program
}
@@ -1138,7 +1234,6 @@ export function findNextDeclarationKeyword(
) {
return nextToken
}
- // return findNextDeclarationKeyword(tokens, nextToken.index)
// probably should do something else here
// throw new Error('Unexpected token')
}
@@ -1190,7 +1285,7 @@ export function hasPipeOperator(
tokens: Token[],
index: number,
_limitIndex = -1
- ): { token: Token; index: number } | false {
+ ): ReturnType<typeof nextMeaningfulToken> | false {
// this probably still needs some work
// should be called on expression statuments (i.e "lineTo" for lineTo(10, 10)) or "{" for sketch declarations
let limitIndex = _limitIndex
@@ -1538,8 +1633,8 @@ export function getNodePathFromSourceRange(
export function isNotCodeToken(token: Token): boolean {
return (
- token.type === 'whitespace' ||
- token.type === 'linecomment' ||
- token.type === 'blockcomment'
+ token?.type === 'whitespace' ||
+ token?.type === 'linecomment' ||
+ token?.type === 'blockcomment'
)
}
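Put plainly, the parser change works like this: whenever nextMeaningfulToken skips whitespace or comment tokens it also returns them as a bonusNonCodeNode, and makeBody/makePipeBody file that node into nonCodeMeta under the index of the statement it follows (or under start when it precedes everything). A small usage sketch, assuming the relative import paths used by the tests above:

import { lexer } from './tokeniser'
import { abstractSyntaxTree } from './abstractSyntaxTree'

const code = `const yo = { a: { b: { c: '123' } } }
// this is a comment
const key = 'c'`

const ast = abstractSyntaxTree(lexer(code))
// The comment (with its surrounding newlines) trails the first statement,
// so it is stored under key 0; leading non-code would land under `start`.
console.log(ast.nonCodeMeta[0].value) // "\n// this is a comment\n"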

View File

@@ -24,6 +24,7 @@ export function addSketchTo(
type: 'BlockStatement',
...dumbyStartend,
body: [],
+ nonCodeMeta: {},
}
const sketch: SketchExpression = {
type: 'SketchExpression',
@@ -56,6 +57,7 @@ export function addSketchTo(
const pipChain: PipeExpression = {
type: 'PipeExpression',
+ nonCodeMeta: {},
...dumbyStartend,
body: [sketch, rotate],
}
@@ -344,11 +346,13 @@ export function extrudeSketch(
const pipeChain: PipeExpression = isInPipeExpression
? {
type: 'PipeExpression',
+ nonCodeMeta: {},
...dumbyStartend,
body: [...pipeExpression.body, extrudeCall],
}
: {
type: 'PipeExpression',
+ nonCodeMeta: {},
...dumbyStartend,
body: [sketchExpression, extrudeCall],
}
@@ -460,6 +464,7 @@ export function sketchOnExtrudedFace(
// create pipe expression with a sketch block piped into a transform function
const sketchPipe: PipeExpression = {
type: 'PipeExpression',
+ nonCodeMeta: {},
...dumbyStartend,
body: [
{
@@ -469,6 +474,7 @@ export function sketchOnExtrudedFace(
type: 'BlockStatement',
...dumbyStartend,
body: [],
+ nonCodeMeta: {},
},
},
{
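One consequence visible above: because nonCodeMeta is now a required field on Program, BlockStatement, and PipeExpression, any code that fabricates AST nodes by hand has to supply at least an empty object. A minimal hand-built node in the same style (dumbyStartend is the placeholder start/end pair used throughout this file):

const dumbyStartend = { start: 0, end: 0 }
const pipeChain = {
  type: 'PipeExpression' as const,
  ...dumbyStartend,
  nonCodeMeta: {}, // required even when there are no comments to carry
  body: [],
}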

View File

@ -1,210 +0,0 @@
import { findTokensBetweenStatements } from './nonAstTokenHelpers'
import { Token } from './tokeniser'
import { BodyItem } from './abstractSyntaxTree'
describe('verify code', () => {
it('should find tokens between statements', () => {
const statement1 = {
type: 'yoyo',
start: 105,
end: 111,
}
const statement2 = {
type: 'yoyo',
start: 150,
end: 156,
}
const tokens: Token[] = [
{
type: 'word',
value: 'yoyo',
start: 100,
end: 104,
},
{
type: 'whitespace',
value: ' ',
start: 111,
end: 115,
},
{
type: 'linecomment',
value: '// this is a comment',
start: 115,
end: 119,
},
{
type: 'whitespace',
value: ' ',
start: 157,
end: 161,
},
]
const result = findTokensBetweenStatements(statement1, statement2, tokens)
// should grab the middle two tokens an the start and end tokens are less than the first statement
// and greater than the second statement respectively
expect(result).toEqual([
{ type: 'whitespace', value: ' ', start: 111, end: 115 },
{
type: 'linecomment',
value: '// this is a comment',
start: 115,
end: 119,
},
])
})
it('propert test with our types', () => {
const tokens: Token[] = [
{
type: 'whitespace',
value: '\n',
start: 37,
end: 38,
},
{
type: 'linecomment',
value: '// this is a comment',
start: 38,
end: 58,
},
{
type: 'whitespace',
value: '\n',
start: 58,
end: 59,
},
]
const statement1: BodyItem = {
type: 'VariableDeclaration',
start: 0,
end: 37,
kind: 'const',
declarations: [
{
type: 'VariableDeclarator',
start: 6,
end: 37,
id: {
type: 'Identifier',
start: 6,
end: 8,
name: 'yo',
},
init: {
type: 'ObjectExpression',
start: 11,
end: 37,
properties: [
{
type: 'ObjectProperty',
start: 13,
end: 35,
key: {
type: 'Identifier',
start: 13,
end: 14,
name: 'a',
},
value: {
type: 'ObjectExpression',
start: 16,
end: 35,
properties: [
{
type: 'ObjectProperty',
start: 18,
end: 33,
key: {
type: 'Identifier',
start: 18,
end: 19,
name: 'b',
},
value: {
type: 'ObjectExpression',
start: 21,
end: 33,
properties: [
{
type: 'ObjectProperty',
start: 23,
end: 31,
key: {
type: 'Identifier',
start: 23,
end: 24,
name: 'c',
},
value: {
type: 'Literal',
start: 26,
end: 31,
value: '123',
raw: "'123'",
},
},
],
},
},
],
},
},
],
},
},
],
}
const statement2: BodyItem = {
type: 'VariableDeclaration',
start: 59,
end: 74,
kind: 'const',
declarations: [
{
type: 'VariableDeclarator',
start: 65,
end: 74,
id: {
type: 'Identifier',
start: 65,
end: 68,
name: 'key',
},
init: {
type: 'Literal',
start: 71,
end: 74,
value: 'c',
raw: "'c'",
},
},
],
}
const result = findTokensBetweenStatements(statement1, statement2, tokens)
expect(result).toEqual([
{
type: 'whitespace',
value: '\n',
start: 37,
end: 38,
},
{
type: 'linecomment',
value: '// this is a comment',
start: 38,
end: 58,
},
{
type: 'whitespace',
value: '\n',
start: 58,
end: 59,
},
])
})
})

View File

@ -1,123 +0,0 @@
import { Token } from './tokeniser'
import { Program, BodyItem } from './abstractSyntaxTree'
export function findTokensBetweenStatements(
statement1: { start: number; end: number },
statement2: { start: number; end: number },
tokens: Token[]
): Token[] {
// Find the start index of the range using binary search
let startIndex = firstGreaterThanBinarySearch(tokens, statement1.end, 'start')
if (startIndex < 0) {
startIndex = ~startIndex
}
// Find the end index of the range using binary search
let endIndex = firstGreaterThanBinarySearch(tokens, statement2.end, 'start')
if (endIndex < 0) {
endIndex = ~endIndex
}
// Return the tokens between the start and end index
return tokens.slice(startIndex, endIndex)
}
function firstGreaterThanBinarySearch(
tokens: { start: number; end: number }[],
target: number,
property: 'start' | 'end'
): number {
let left = 0
// has trouble with including tokens at the end of the range
const paddedTokens = [
{
type: 'whitespace',
value: '',
start: 0,
end: 0,
},
...tokens,
{
type: 'whitespace',
value: '',
start: tokens[tokens.length - 1]?.end + 1000,
end: tokens[tokens.length - 1]?.end + 1001,
},
]
let right = paddedTokens.length - 1
while (left <= right) {
const middle = left + Math.floor((right - left) / 2)
if (paddedTokens[middle]?.[property] >= target) {
if (middle === 1 || paddedTokens[middle - 1]?.[property] < target) {
// minus 1 because of the padding
return middle - 1
}
right = middle - 1
} else {
left = middle + 1
}
}
return -1
}
export function getNonCodeString(
body: Program['body'],
index: number,
tokens: Token[]
): string {
let tokensToIntegrate: Token[] = []
const currentStatement = body[index]
const nextStatement = body[index + 1]
if (nextStatement && nextStatement.start && currentStatement.end) {
tokensToIntegrate = findTokensBetweenStatements(
currentStatement,
nextStatement,
tokens
)
} else if (index === body.length - 1) {
const tokensAfter = firstGreaterThanBinarySearch(
tokens,
currentStatement?.end,
'start'
)
if (tokensAfter > 0) {
tokensToIntegrate = tokens.slice(tokensAfter)
}
}
if (tokensToIntegrate.length > 0) {
const nonCodeString = tokensToIntegrate.map((token) => token.value).join('')
// check it extra ends with a line break followed by spaces (only spaces not new lines)
const hasWhitespaceOnEnd = nonCodeString.match(/(\n *)$/)
if (hasWhitespaceOnEnd) {
// we always put each statement on a new line, so this prevents it adding an extra line
// however if the user puts more than one line break between statements, we'll respect it since
// we're only removing the last one
return nonCodeString.slice(0, -hasWhitespaceOnEnd[0].length)
}
return nonCodeString
}
return ''
}
export function getStartNonCodeString(
firstStatement: BodyItem,
tokens: Token[]
): string {
if (!firstStatement) return ''
const tokensBeforeIndex = tokens.length
? firstGreaterThanBinarySearch(tokens, firstStatement.start, 'end')
: 0
let nonCodeString = ''
if (tokensBeforeIndex > 0) {
nonCodeString = tokens
.slice(0, tokensBeforeIndex)
.map((token) => token.value)
.join('')
}
return nonCodeString.trim() ? nonCodeString.trim() + '\n' : ''
}

View File

@@ -1,4 +1,4 @@
- import { recast, processTokens } from './recast'
+ import { recast } from './recast'
import { Program, abstractSyntaxTree } from './abstractSyntaxTree'
import { lexer, Token } from './tokeniser'
import fs from 'node:fs'
@@ -47,7 +47,7 @@ const myVar = "hello"
log(5, myVar)`
const { ast } = code2ast(code)
const recasted = recast(ast)
- expect(recasted).toBe(code.trim())
+ expect(recasted).toBe(code)
})
it('function declaration with call', () => {
const code = [
@@ -59,7 +59,7 @@ log(5, myVar)`
].join('\n')
const { ast } = code2ast(code)
const recasted = recast(ast)
- expect(recasted).toBe(code.trim())
+ expect(recasted).toBe(code)
})
it('sketch declaration', () => {
let code = `sketch mySketch {
@@ -97,7 +97,7 @@ show(mySketch)
].join('\n')
const { ast } = code2ast(code)
const recasted = recast(ast)
- expect(recasted).toBe(code.trim())
+ expect(recasted).toBe(code)
})
it('recast nested binary expression', () => {
const code = ['const myVar = 1 + 2 * 5'].join('\n')
@@ -180,106 +180,114 @@ const myVar2 = yo['a'][key2].c`
const recasted = recast(ast)
expect(recasted).toBe(code.trim())
})
- })
- describe('testing recasting with comments and whitespace', () => {
it('code with comments', () => {
const code = `
const yo = { a: { b: { c: '123' } } }
// this is a comment
const key = 'c'`
- const { ast, tokens } = code2ast(code)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(code.trim())
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
})
- it('code with extra whitespace should be respected when recasted', () => {
- const withExtraEmptylLineBetween = `
- const yo = { a: { b: { c: '123' } } }
- const key = 'c'`
- const { ast, tokens } = code2ast(withExtraEmptylLineBetween)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(withExtraEmptylLineBetween.trim())
+ it('code with comment and extra lines', () => {
+ const code = `
+ const yo = 'c' /* this is
+ a
+ comment */
+ const yo = 'bing'`
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
})
- it('code with block comment in between', () => {
- const withExtraEmptylLineBetween = `
- const yo = { a: { b: { c: '123' } } }
- /* hi there
- yo yo yo
- */
- const key = 'c'`
- const { ast, tokens } = code2ast(withExtraEmptylLineBetween)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(withExtraEmptylLineBetween.trim())
- })
- it('code with block comment line comment and empty line', () => {
- const withExtraEmptylLineBetween = `
- const yo = { a: { b: { c: '123' } } }
- /* hi there
- yo yo yo
- */
- // empty line above and line comment here
- const key = 'c'`
- const { ast, tokens } = code2ast(withExtraEmptylLineBetween)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(withExtraEmptylLineBetween.trim())
- })
- it('code comment at the start and end', () => {
- const withExtraEmptylLineBetween = `
- // comment at the start
+ it('comments at the start and end', () => {
+ const code = `
+ // this is a comment
const yo = { a: { b: { c: '123' } } }
const key = 'c'
- // comment at the end`
- const { ast, tokens } = code2ast(withExtraEmptylLineBetween)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(withExtraEmptylLineBetween.trim())
+ // this is also a comment`
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
})
- it('comments and random new lines between statements within function declarations are fine', () => {
- const withExtraEmptylLineBetween = `
- const fn = (a) => {
- const yo = 5
- const yo = { a: { b: { c: '123' } } }
- // a comment
- return a + yo
+ it('comments in a fn block', () => {
+ const code = `
+ const myFn = () => {
+ // this is a comment
+ /* block
+ comment */
+ const key = 'c'
+ // this is also a comment
+ }`
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
+ })
+ it('comments in a sketch block', () => {
+ const code = `
+ sketch mySketch { /* comment at start */
+ // comment at start more
+ path myPath = lineTo(0, 1) /* comment here with
+ some whitespace below */
+ lineTo(1, 1)
+ /* comment before declaration*/path rightPath = lineTo(1, 0)
+ close()
+ // comment at end
}`
- const { ast, tokens } = code2ast(withExtraEmptylLineBetween)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(withExtraEmptylLineBetween.trim())
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
})
- it('Comment with sketch', () => {
- const withExtraEmptylLineBetween = `sketch part001 {
- lineTo(5.98, -0.04)
- // yo
- lineTo(0.18, 0.03)
+ it('comments in a pipe expression', () => {
+ const code = [
+ 'sketch mySk1 {',
+ ' lineTo(1, 1)',
+ ' path myPath = lineTo(0, 1)',
+ ' lineTo(1, 1)',
+ '}',
+ ' // a comment',
+ ' |> rx(90, %)',
+ ].join('\n')
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
+ })
+ it('comments sprinkled in all over the place', () => {
+ const code = `
+ /* comment at start */
+ sketch mySk1 {
+ lineTo(1, 1)
+ // comment here
+ path myPath = lineTo(0, 1)
+ lineTo(1, 1) /* and
+ here
+ */
}
+ // a comment between pipe expression statements
|> rx(90, %)
- |> extrude(9.6, %)
- show(part001)`
- const { ast, tokens } = code2ast(withExtraEmptylLineBetween)
- const processedTokens = processTokens(tokens)
- const recasted = recast(ast, processedTokens)
- expect(recasted).toBe(withExtraEmptylLineBetween.trim())
+ // and another with just white space between others below
+ |> ry(45, %)
+ |> rx(45, %)
+ /*
+ one more for good measure
+ */`
+ const { ast } = code2ast(code)
+ const recasted = recast(ast)
+ expect(recasted).toBe(code)
+ })
})

View File

@@ -1,3 +1,4 @@
+ import { start } from 'repl'
import {
Program,
BinaryExpression,
@@ -10,32 +11,17 @@ import {
ArrayExpression,
ObjectExpression,
MemberExpression,
+ PipeExpression,
} from './abstractSyntaxTree'
import { precedence } from './astMathExpressions'
- import { Token } from './tokeniser'
- import { getNonCodeString, getStartNonCodeString } from './nonAstTokenHelpers'
- export const processTokens = (tokens: Token[]): Token[] => {
- return tokens.filter((token) => {
- if (token.type === 'linecomment' || token.type === 'blockcomment')
- return true
- if (token.type === 'whitespace') {
- if (token.value.includes('\n')) return true
- }
- return false
- })
- }
export function recast(
ast: Program,
- tokens: Token[] = [],
previousWrittenCode = '',
- indentation = ''
+ indentation = '',
+ isWithBlock = false
): string {
- let startComments = getStartNonCodeString(ast?.body?.[0], tokens)
- return (
- startComments +
- ast.body
+ return ast.body
.map((statement) => {
if (statement.type === 'ExpressionStatement') {
if (statement.expression.type === 'BinaryExpression') {
@@ -45,7 +31,7 @@ export function recast(
} else if (statement.expression.type === 'ObjectExpression') {
return recastObjectExpression(statement.expression)
} else if (statement.expression.type === 'CallExpression') {
- return recastCallExpression(statement.expression, tokens)
+ return recastCallExpression(statement.expression)
}
} else if (statement.type === 'VariableDeclaration') {
return statement.declarations
@@ -60,22 +46,45 @@ export function recast(
: ' = '
return `${statement.kind} ${
declaration.id.name
- }${assignmentString}${recastValue(declaration.init, '', tokens)}`
+ }${assignmentString}${recastValue(declaration.init)}`
})
.join('')
} else if (statement.type === 'ReturnStatement') {
- return `return ${recastArgument(statement.argument, tokens)}`
+ return `return ${recastArgument(statement.argument)}`
}
return statement.type
})
- .map(
- (statementString, index) =>
- indentation +
- statementString +
- getNonCodeString(ast.body, index, tokens)
- )
- .join('\n')
- )
+ .map((recastStr, index, arr) => {
+ const isLegitCustomWhitespaceOrComment = (str: string) =>
+ str !== ' ' && str !== '\n' && str !== ' '
+ // determine the value of startString
+ const lastWhiteSpaceOrComment =
+ index > 0 ? ast?.nonCodeMeta?.[index - 1]?.value : ' '
+ // indentation of this line will be covered by the previous if we're using a custom whitespace or comment
+ let startString = isLegitCustomWhitespaceOrComment(
+ lastWhiteSpaceOrComment
+ )
+ ? ''
+ : indentation
+ if (index === 0) {
+ startString = ast?.nonCodeMeta?.start?.value || indentation
+ }
+ if (startString.endsWith('\n')) {
+ startString += indentation
+ }
+ // determine the value of endString
+ const maybeLineBreak: string =
+ index === arr.length - 1 && !isWithBlock ? '' : '\n'
+ let customWhiteSpaceOrComment = ast?.nonCodeMeta?.[index]?.value
+ if (!isLegitCustomWhitespaceOrComment(customWhiteSpaceOrComment))
+ customWhiteSpaceOrComment = ''
+ let endString = customWhiteSpaceOrComment || maybeLineBreak
+ return startString + recastStr + endString
+ })
+ .join('')
}
function recastBinaryExpression(expression: BinaryExpression): string {
@@ -151,16 +160,13 @@ function recastLiteral(literal: Literal): string {
return String(literal?.value)
}
- function recastCallExpression(
- expression: CallExpression,
- tokens: Token[] = []
- ): string {
+ function recastCallExpression(expression: CallExpression): string {
return `${expression.callee.name}(${expression.arguments
- .map((arg) => recastArgument(arg, tokens))
+ .map(recastArgument)
.join(', ')})`
}
- function recastArgument(argument: Value, tokens: Token[] = []): string {
+ function recastArgument(argument: Value): string {
if (argument.type === 'Literal') {
return recastLiteral(argument)
} else if (argument.type === 'Identifier') {
@@ -172,33 +178,28 @@ function recastArgument(argument: Value, tokens: Token[] = []): string {
} else if (argument.type === 'ObjectExpression') {
return recastObjectExpression(argument)
} else if (argument.type === 'CallExpression') {
- return recastCallExpression(argument, tokens)
+ return recastCallExpression(argument)
} else if (argument.type === 'FunctionExpression') {
- return recastFunction(argument, tokens)
+ return recastFunction(argument)
} else if (argument.type === 'PipeSubstitution') {
return '%'
}
throw new Error(`Cannot recast argument ${argument}`)
}
- function recastFunction(
- expression: FunctionExpression,
- tokens: Token[] = [],
- indentation = ''
- ): string {
- return `(${expression.params.map((param) => param.name).join(', ')}) => {
- ${recast(expression.body, tokens, '', indentation + ' ')}
- }`
+ function recastFunction(expression: FunctionExpression): string {
+ return `(${expression.params
+ .map((param) => param.name)
+ .join(', ')}) => {${recast(expression.body, '', '', true)}}`
}
function recastSketchExpression(
expression: SketchExpression,
- indentation: string,
- tokens: Token[] = []
+ indentation: string
): string {
- return `{
- ${recast(expression.body, tokens, '', indentation + ' ').trimEnd()}
- }`
+ return `{${
+ recast(expression.body, '', indentation + ' ', true) || '\n \n'
+ }}`
}
function recastMemberExpression(
@@ -218,11 +219,7 @@ function recastMemberExpression(
return expression.object.name + keyString
}
- function recastValue(
- node: Value,
- indentation = '',
- tokens: Token[] = []
- ): string {
+ function recastValue(node: Value, indentation = ''): string {
if (node.type === 'BinaryExpression') {
return recastBinaryExpression(node)
} else if (node.type === 'ArrayExpression') {
@@ -234,17 +231,38 @@ function recastValue(
} else if (node.type === 'Literal') {
return recastLiteral(node)
} else if (node.type === 'FunctionExpression') {
- return recastFunction(node, tokens)
+ return recastFunction(node)
} else if (node.type === 'CallExpression') {
- return recastCallExpression(node, tokens)
+ return recastCallExpression(node)
} else if (node.type === 'Identifier') {
return node.name
} else if (node.type === 'SketchExpression') {
- return recastSketchExpression(node, indentation, tokens)
+ return recastSketchExpression(node, indentation)
} else if (node.type === 'PipeExpression') {
- return node.body
- .map((statement): string => recastValue(statement, indentation, tokens))
- .join('\n |> ')
+ return recastPipeExpression(node)
}
return ''
}
+ function recastPipeExpression(expression: PipeExpression): string {
+ return expression.body
+ .map((statement, index, arr): string => {
+ let str = ''
+ let indentation = ' '
+ let maybeLineBreak = '\n'
+ str = recastValue(statement)
+ if (
+ expression.nonCodeMeta?.[index]?.value &&
+ expression.nonCodeMeta?.[index].value !== ' '
+ ) {
+ str += expression.nonCodeMeta[index]?.value
+ indentation = ''
+ maybeLineBreak = ''
+ }
+ if (index !== arr.length - 1) {
+ str += maybeLineBreak + indentation + '|> '
+ }
+ return str
+ })
+ .join('')
+ }
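The round trip the new tests lean on: recast now reads nonCodeMeta back out while printing, so comments and deliberate blank lines survive a parse/print cycle and the assertions can use toBe(code) rather than toBe(code.trim()). Roughly, with the same assumed import paths as before:

import { lexer } from './tokeniser'
import { abstractSyntaxTree } from './abstractSyntaxTree'
import { recast } from './recast'

const code = `
const yo = { a: { b: { c: '123' } } }
// this is a comment
const key = 'c'`

// Comments ride along on the AST, so printing reproduces the original source.
console.log(recast(abstractSyntaxTree(lexer(code))) === code) // true, per the tests above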

View File

@@ -7,8 +7,7 @@ import {
} from './lang/abstractSyntaxTree'
import { ProgramMemory, Position, PathToNode, Rotation } from './lang/executor'
import { recast } from './lang/recast'
- import { lexer, Token } from './lang/tokeniser'
+ import { lexer } from './lang/tokeniser'
- import { processTokens } from './lang/recast'
export type Range = [number, number]
@@ -64,7 +63,7 @@ interface StoreState {
addLog: (log: string) => void
resetLogs: () => void
ast: Program | null
- setAst: (ast: Program | null, tokens?: Token[]) => void
+ setAst: (ast: Program | null) => void
updateAst: (ast: Program, focusPath?: PathToNode) => void
code: string
setCode: (code: string) => void
@@ -76,7 +75,6 @@ interface StoreState {
setError: (error?: string) => void
programMemory: ProgramMemory
setProgramMemory: (programMemory: ProgramMemory) => void
- tokens: Token[]
}
export const useStore = create<StoreState>()((set, get) => ({
@@ -121,16 +119,11 @@ export const useStore = create<StoreState>()((set, get) => ({
set({ logs: [] })
},
ast: null,
- setAst: (ast, tokens) => {
- if (tokens) {
- set({ tokens: processTokens(tokens), ast })
- } else {
- set({ ast, tokens: [] })
- }
+ setAst: (ast) => {
+ set({ ast })
},
updateAst: (ast, focusPath) => {
- const tokens = get().tokens
- const newCode = recast(ast, tokens)
+ const newCode = recast(ast)
const astWithUpdatedSource = abstractSyntaxTree(lexer(newCode))
set({ ast: astWithUpdatedSource, code: newCode })
@@ -149,9 +142,8 @@ export const useStore = create<StoreState>()((set, get) => ({
},
formatCode: () => {
const code = get().code
- const tokens = lexer(code)
- const ast = abstractSyntaxTree(tokens)
- const newCode = recast(ast, processTokens(tokens))
+ const ast = abstractSyntaxTree(lexer(code))
+ const newCode = recast(ast)
set({ code: newCode, ast })
},
errorState: {
@@ -163,5 +155,4 @@ export const useStore = create<StoreState>()((set, get) => ({
},
programMemory: { root: {}, _sketch: [] },
setProgramMemory: (programMemory) => set({ programMemory }),
- tokens: [],
}))
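With tokens gone from the store, formatting becomes a pure parse-and-reprint step, which is what formatCode above now does. A standalone sketch of that flow (hypothetical helper, using the store's import paths):

import { lexer } from './lang/tokeniser'
import { abstractSyntaxTree } from './lang/abstractSyntaxTree'
import { recast } from './lang/recast'

function formatSource(code: string): string {
  // No token bookkeeping needed: comments travel on the AST via nonCodeMeta.
  const ast = abstractSyntaxTree(lexer(code))
  return recast(ast)
}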