Port abstractSyntaxTree (the parser) to Rust/WASM 🦀 (#207)
Squashed commit messages:

* initial port: leaf nodes first, then all AST binary expressions passing; abstractSyntaxTree replaced in all JS tests; clippy fixes; comment cleanup; Rust files re-organized; old AST removed (the lexer is still needed)
* fmt
* rearrange test fns
* start of returning Results instead of panicking
* make tests compile again
* replace more panics with errors; clean up unwraps
* fix clippy; fixups
* update deps
* several rounds of updates and test fixes until all tests pass

Signed-off-by: Jess Frazelle <github@jessfraz.com>
Co-authored-by: Jess Frazelle <github@jessfraz.com>
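Every call-site change below has the same shape: the JS `lexer` + `abstractSyntaxTree` pair becomes a single call into the Rust/WASM parser. A minimal TypeScript sketch assembled from the imports and call sites visible in this diff — the `parse` helper name here is illustrative, not from the PR:

    // Before this PR (pure-JS pipeline, the removed lines below):
    //   import { abstractSyntaxTree } from './lang/abstractSyntaxTree'
    //   import { lexer } from './lang/tokeniser'
    //   const ast = abstractSyntaxTree(lexer(code))

    // After this PR (Rust/WASM parser, the added lines below):
    import { parser_wasm, asyncParser } from './lang/abstractSyntaxTree'
    import { initPromise } from './lang/rust'

    export async function parse(code: string) {
      await initPromise // the wasm module must finish loading; the tests await this in beforeAll
      return parser_wasm(code) // synchronous once loaded; App.tsx awaits asyncParser(code) instead
    }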
23  .github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,23 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+  - package-ecosystem: 'npm' # See documentation for possible values
+    directory: '/' # Location of package manifests
+    schedule:
+      interval: 'daily'
+  - package-ecosystem: 'github-actions' # See documentation for possible values
+    directory: '/' # Location of package manifests
+    schedule:
+      interval: 'daily'
+  - package-ecosystem: 'cargo' # See documentation for possible values
+    directory: '/src/wasm-lib/' # Location of package manifests
+    schedule:
+      interval: 'daily'
+  - package-ecosystem: 'cargo' # See documentation for possible values
+    directory: '/src-tauri/' # Location of package manifests
+    schedule:
+      interval: 'daily'
47  .github/workflows/cargo-build.yml  vendored  Normal file
@@ -0,0 +1,47 @@
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - '**.rs'
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - .github/workflows/cargo-build.yml
+  pull_request:
+    paths:
+      - '**.rs'
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - .github/workflows/cargo-build.yml
+name: cargo build
+jobs:
+  cargobuild:
+    name: cargo build
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        dir: ['src/wasm-lib', 'src-tauri']
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Install latest rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+
+      - name: install dependencies
+        if: matrix.dir == 'src-tauri'
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
+      - name: Rust Cache
+        uses: Swatinem/rust-cache@v2.6.1
+
+      - name: Run cargo build
+        run: |
+          cd "${{ matrix.dir }}"
+          cargo build --all
+        shell: bash
46  .github/workflows/cargo-clippy.yml  vendored  Normal file
@@ -0,0 +1,46 @@
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - '**.rs'
+      - .github/workflows/cargo-clippy.yml
+  pull_request:
+    paths:
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - '**.rs'
+      - .github/workflows/cargo-build.yml
+name: cargo clippy
+jobs:
+  cargoclippy:
+    name: cargo clippy
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        dir: ['src/wasm-lib', 'src-tauri']
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install latest rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+          components: clippy
+
+      - name: install dependencies
+        if: matrix.dir == 'src-tauri'
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
+      - name: Rust Cache
+        uses: Swatinem/rust-cache@v2.6.1
+
+      - name: Run clippy
+        run: |
+          cd "${{ matrix.dir }}"
+          cargo clippy --all --tests -- -D warnings
45  .github/workflows/cargo-fmt.yml  vendored  Normal file
@@ -0,0 +1,45 @@
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - '**.rs'
+      - .github/workflows/cargo-fmt.yml
+  pull_request:
+    paths:
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - '**.rs'
+      - .github/workflows/cargo-fmt.yml
+permissions:
+  packages: read
+  contents: read
+name: cargo fmt
+jobs:
+  cargofmt:
+    name: cargo fmt
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        dir: ['src/wasm-lib', 'src-tauri']
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install latest rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+          components: rustfmt
+
+      - name: Rust Cache
+        uses: Swatinem/rust-cache@v2.6.1
+
+      - name: Run cargo fmt
+        run: |
+          cd "${{ matrix.dir }}"
+          cargo fmt -- --check
+        shell: bash
48  .github/workflows/cargo-test.yml  vendored  Normal file
@@ -0,0 +1,48 @@
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - '**.rs'
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - .github/workflows/cargo-test.yml
+  pull_request:
+    paths:
+      - '**.rs'
+      - '**/Cargo.toml'
+      - '**/Cargo.lock'
+      - '**/rust-toolchain.toml'
+      - .github/workflows/cargo-test.yml
+  workflow_dispatch:
+permissions: read-all
+name: cargo test
+jobs:
+  cargotest:
+    name: cargo test
+    runs-on: ubuntu-latest-8-cores
+    strategy:
+      matrix:
+        dir: ['src/wasm-lib', 'src-tauri']
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install latest rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+      - name: install dependencies
+        if: matrix.dir == 'src-tauri'
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
+      - uses: taiki-e/install-action@cargo-llvm-cov
+      - uses: taiki-e/install-action@nextest
+      - name: Rust Cache
+        uses: Swatinem/rust-cache@v2.6.1
+      - name: cargo test
+        shell: bash
+        run: |-
+          cd "${{ matrix.dir }}"
+          cargo llvm-cov nextest --lcov --output-path lcov.info --test-threads=1 --no-fail-fast
@@ -8,8 +8,7 @@ import {
 } from 'react'
 import { DebugPanel } from './components/DebugPanel'
 import { v4 as uuidv4 } from 'uuid'
-import { asyncLexer } from './lang/tokeniser'
-import { abstractSyntaxTree } from './lang/abstractSyntaxTree'
+import { asyncParser } from './lang/abstractSyntaxTree'
 import { _executor } from './lang/executor'
 import CodeMirror from '@uiw/react-codemirror'
 import { langs } from '@uiw/codemirror-extensions-langs'
@@ -283,8 +282,7 @@ export function App() {
         setAst(null)
         return
       }
-      const tokens = await asyncLexer(code)
-      const _ast = abstractSyntaxTree(tokens)
+      const _ast = await asyncParser(code)
       setAst(_ast)
       resetLogs()
       resetKCLErrors()
@@ -1,5 +1,5 @@
 import { useEffect, useState, useRef } from 'react'
-import { abstractSyntaxTree } from '../lang/abstractSyntaxTree'
+import { parser_wasm } from '../lang/abstractSyntaxTree'
 import { BinaryPart, Value } from '../lang/abstractSyntaxTreeTypes'
 import { executor } from '../lang/executor'
 import {
||||||
@ -144,7 +144,7 @@ export function useCalc({
|
|||||||
if (!engineCommandManager) return
|
if (!engineCommandManager) return
|
||||||
try {
|
try {
|
||||||
const code = `const __result__ = ${value}\nshow(__result__)`
|
const code = `const __result__ = ${value}\nshow(__result__)`
|
||||||
const ast = abstractSyntaxTree(lexer(code))
|
const ast = parser_wasm(code)
|
||||||
const _programMem: any = { root: {} }
|
const _programMem: any = { root: {} }
|
||||||
availableVarInfo.variables.forEach(({ key, value }) => {
|
availableVarInfo.variables.forEach(({ key, value }) => {
|
||||||
_programMem.root[key] = { type: 'userVal', value, __meta: [] }
|
_programMem.root[key] = { type: 'userVal', value, __meta: [] }
|
||||||
|
@@ -1,6 +1,5 @@
 import { processMemory } from './MemoryPanel'
-import { lexer } from '../lang/tokeniser'
-import { abstractSyntaxTree } from '../lang/abstractSyntaxTree'
+import { parser_wasm } from '../lang/abstractSyntaxTree'
 import { enginelessExecutor } from '../lib/testHelpers'
 import { initPromise } from '../lang/rust'

@@ -27,8 +26,7 @@ describe('processMemory', () => {
   |> lineTo([2.15, 4.32], %)
   // |> rx(90, %)
 show(theExtrude, theSketch)`
-    const tokens = lexer(code)
-    const ast = abstractSyntaxTree(tokens)
+    const ast = parser_wasm(code)
     const programMemory = await enginelessExecutor(ast, {
       root: {
         log: {
@@ -1,43 +1,13 @@
-import {
-  abstractSyntaxTree,
-  findClosingBrace,
-  hasPipeOperator,
-  findEndOfBinaryExpression,
-} from './abstractSyntaxTree'
-import { lexer } from './tokeniser'
+import { parser_wasm } from './abstractSyntaxTree'
 import { initPromise } from './rust'

 beforeAll(() => initPromise)

-describe('findClosingBrace', () => {
-  test('finds the closing brace', () => {
-    const basic = '( hey )'
-    expect(findClosingBrace(lexer(basic), 0)).toBe(4)
-
-    const handlesNonZeroIndex =
-      '(indexForBracketToRightOfThisIsTwo(shouldBeFour)AndNotThisSix)'
-    expect(findClosingBrace(lexer(handlesNonZeroIndex), 2)).toBe(4)
-    expect(findClosingBrace(lexer(handlesNonZeroIndex), 0)).toBe(6)
-
-    const handlesNested =
-      '{a{b{c(}d]}eathou athoeu tah u} thatOneToTheLeftIsLast }'
-    expect(findClosingBrace(lexer(handlesNested), 0)).toBe(18)
-
-    // throws when not started on a brace
-    expect(() => findClosingBrace(lexer(handlesNested), 1)).toThrow()
-  })
-})
-
 describe('testing AST', () => {
   test('5 + 6', () => {
-    const tokens = lexer('5 +6')
-    const result = abstractSyntaxTree(tokens)
+    const result = parser_wasm('5 +6')
     delete (result as any).nonCodeMeta
-    expect(result).toEqual({
-      type: 'Program',
-      start: 0,
-      end: 4,
-      body: [
+    expect(result.body).toEqual([
       {
         type: 'ExpressionStatement',
         start: 0,
@@ -63,12 +33,10 @@ describe('testing AST', () => {
           },
         },
       },
-    ],
-  })
+    ])
   })
   test('const myVar = 5', () => {
-    const tokens = lexer('const myVar = 5')
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm('const myVar = 5')
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -102,8 +70,7 @@ describe('testing AST', () => {
     const code = `const myVar = 5
 const newVar = myVar + 1
 `
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -173,8 +140,7 @@ const newVar = myVar + 1
   })
   test('using std function "log"', () => {
     const code = `log(5, "hello", aIdentifier)`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'ExpressionStatement',
@@ -221,8 +187,7 @@

 describe('testing function declaration', () => {
   test('fn funcN = () => {}', () => {
-    const tokens = lexer('fn funcN = () => {}')
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm('fn funcN = () => {}')
     delete (body[0] as any).declarations[0].init.body.nonCodeMeta
     expect(body).toEqual([
       {
|||||||
])
|
])
|
||||||
})
|
})
|
||||||
test('fn funcN = (a, b) => {return a + b}', () => {
|
test('fn funcN = (a, b) => {return a + b}', () => {
|
||||||
const tokens = lexer(
|
const { body } = parser_wasm(
|
||||||
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
|
['fn funcN = (a, b) => {', ' return a + b', '}'].join('\n')
|
||||||
)
|
)
|
||||||
const { body } = abstractSyntaxTree(tokens)
|
|
||||||
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
delete (body[0] as any).declarations[0].init.body.nonCodeMeta
|
||||||
expect(body).toEqual([
|
expect(body).toEqual([
|
||||||
{
|
{
|
||||||
@@ -338,11 +302,9 @@ describe('testing function declaration', () => {
     ])
   })
   test('call expression assignment', () => {
-    const tokens = lexer(
-      `fn funcN = (a, b) => { return a + b }
+    const code = `fn funcN = (a, b) => { return a + b }
 const myVar = funcN(1, 2)`
-    )
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     delete (body[0] as any).declarations[0].init.body.nonCodeMeta
     expect(body).toEqual([
       {
@@ -465,99 +427,15 @@ const myVar = funcN(1, 2)`
   })
 })

-describe('testing hasPipeOperator', () => {
-  test('hasPipeOperator is true', () => {
-    let code = `sketch mySketch {
-  lineTo(2, 3)
-} |> rx(45, %)
-`
-
-    const tokens = lexer(code)
-    const result = hasPipeOperator(tokens, 0)
-    delete (result as any).bonusNonCodeNode
-    expect(result).toEqual({
-      index: 16,
-      token: { end: 37, start: 35, type: 'operator', value: '|>' },
-    })
-  })
-  test('matches the first pipe', () => {
-    let code = `sketch mySketch {
-  lineTo(2, 3)
-} |> rx(45, %) |> rx(45, %)
-`
-    const tokens = lexer(code)
-    const result = hasPipeOperator(tokens, 0)
-    delete (result as any).bonusNonCodeNode
-    expect(result).toEqual({
-      index: 16,
-      token: { end: 37, start: 35, type: 'operator', value: '|>' },
-    })
-    if (!result) throw new Error('should not happen')
-    expect(code.slice(result.token.start, result.token.end)).toEqual('|>')
-  })
-  test('hasPipeOperator is false when the pipe operator is after a new variable declaration', () => {
-    let code = `sketch mySketch {
-  lineTo(2, 3)
-}
-const yo = myFunc(9()
-  |> rx(45, %)
-`
-    const tokens = lexer(code)
-    expect(hasPipeOperator(tokens, 0)).toEqual(false)
-  })
-  test('hasPipeOperator with binary expression', () => {
-    let code = `const myVar2 = 5 + 1 |> myFn(%)`
-    const tokens = lexer(code)
-    const result = hasPipeOperator(tokens, 1)
-    delete (result as any).bonusNonCodeNode
-    expect(result).toEqual({
-      index: 12,
-      token: { end: 23, start: 21, type: 'operator', value: '|>' },
-    })
-    if (!result) throw new Error('should not happen')
-    expect(code.slice(result.token.start, result.token.end)).toEqual('|>')
-  })
-  test('hasPipeOperator of called mid sketchExpression on a callExpression, and called at the start of the sketchExpression at "{"', () => {
-    const code = [
-      'sketch mySk1 {',
-      '  lineTo(1,1)',
-      '  path myPath = lineTo(0, 1)',
-      '  lineTo(1,1)',
-      '} |> rx(90, %)',
-      'show(mySk1)',
-    ].join('\n')
-    const tokens = lexer(code)
-    const tokenWithMyPathIndex = tokens.findIndex(
-      ({ value }) => value === 'myPath'
-    )
-    const tokenWithLineToIndexForVarDecIndex = tokens.findIndex(
-      ({ value }, index) => value === 'lineTo' && index > tokenWithMyPathIndex
-    )
-    const result = hasPipeOperator(tokens, tokenWithLineToIndexForVarDecIndex)
-    expect(result).toBe(false)
-
-    const braceTokenIndex = tokens.findIndex(({ value }) => value === '{')
-    const result2 = hasPipeOperator(tokens, braceTokenIndex)
-    delete (result2 as any).bonusNonCodeNode
-    expect(result2).toEqual({
-      index: 36,
-      token: { end: 76, start: 74, type: 'operator', value: '|>' },
-    })
-    if (!result2) throw new Error('should not happen')
-    expect(code.slice(result2?.token?.start, result2?.token?.end)).toEqual('|>')
-  })
-})
-
 describe('testing pipe operator special', () => {
   test('pipe operator with sketch', () => {
     let code = `const mySketch = startSketchAt([0, 0])
   |> lineTo([2, 3], %)
   |> lineTo({ to: [0, 1], tag: "myPath" }, %)
   |> lineTo([1, 1], %)
-} |> rx(45, %)
+  |> rx(45, %)
 `
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     delete (body[0] as any).declarations[0].init.nonCodeMeta
     expect(body).toEqual([
       {
@@ -786,8 +664,7 @@ describe('testing pipe operator special', () => {
   })
   test('pipe operator with binary expression', () => {
     let code = `const myVar = 5 + 6 |> myFunc(45, %)`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     delete (body as any)[0].declarations[0].init.nonCodeMeta
     expect(body).toEqual([
       {
@@ -866,8 +743,7 @@ describe('testing pipe operator special', () => {
   })
   test('array expression', () => {
     let code = `const yo = [1, '2', three, 4 + 5]`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -942,8 +818,7 @@ describe('testing pipe operator special', () => {
       'const three = 3',
       "const yo = {aStr: 'str', anum: 2, identifier: three, binExp: 4 + 5}",
     ].join('\n')
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
    expect(body).toEqual([
      {
        type: 'VariableDeclaration',
@@ -1087,8 +962,7 @@ describe('testing pipe operator special', () => {
     const code = `const yo = {key: {
       key2: 'value'
     }}`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -1156,8 +1030,7 @@ describe('testing pipe operator special', () => {
   })
   test('object expression with array ast', () => {
     const code = `const yo = {key: [1, '2']}`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -1221,8 +1094,7 @@ describe('testing pipe operator special', () => {
   })
   test('object memberExpression simple', () => {
     const code = `const prop = yo.one.two`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -1277,8 +1149,7 @@ describe('testing pipe operator special', () => {
   })
   test('object memberExpression with square braces', () => {
     const code = `const prop = yo.one["two"]`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -1334,8 +1205,7 @@ describe('testing pipe operator special', () => {
   })
   test('object memberExpression with two square braces literal and identifier', () => {
     const code = `const prop = yo["one"][two]`
-    const tokens = lexer(code)
-    const { body } = abstractSyntaxTree(tokens)
+    const { body } = parser_wasm(code)
     expect(body).toEqual([
       {
         type: 'VariableDeclaration',
@@ -1394,7 +1264,7 @@ describe('testing pipe operator special', () => {
 describe('nests binary expressions correctly', () => {
   it('works with the simple case', () => {
     const code = `const yo = 1 + 2`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     expect(body[0]).toEqual({
       type: 'VariableDeclaration',
       start: 0,
@@ -1438,7 +1308,7 @@ describe('nests binary expressions correctly', () => {
   it('should nest according to precedence with multiply first', () => {
     // should be binExp { binExp { lit-1 * lit-2 } + lit}
     const code = `const yo = 1 * 2 + 3`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     expect(body[0]).toEqual({
       type: 'VariableDeclaration',
       start: 0,
@@ -1495,7 +1365,7 @@ describe('nests binary expressions correctly', () => {
   it('should nest according to precedence with sum first', () => {
     // should be binExp { lit-1 + binExp { lit-2 * lit-3 } }
     const code = `const yo = 1 + 2 * 3`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     expect(body[0]).toEqual({
       type: 'VariableDeclaration',
       start: 0,
@@ -1551,7 +1421,7 @@ describe('nests binary expressions correctly', () => {
   })
   it('should nest properly with two opperators of equal precedence', () => {
     const code = `const yo = 1 + 2 - 3`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     expect((body[0] as any).declarations[0].init).toEqual({
       type: 'BinaryExpression',
       start: 11,
@@ -1588,7 +1458,7 @@ describe('nests binary expressions correctly', () => {
   })
   it('should nest properly with two opperators of equal (but higher) precedence', () => {
     const code = `const yo = 1 * 2 / 3`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     expect((body[0] as any).declarations[0].init).toEqual({
       type: 'BinaryExpression',
       start: 11,
@@ -1625,7 +1495,7 @@ describe('nests binary expressions correctly', () => {
   })
   it('should nest properly with longer example', () => {
     const code = `const yo = 1 + 2 * (3 - 4) / 5 + 6`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     const init = (body[0] as any).declarations[0].init
     expect(init).toEqual({
       type: 'BinaryExpression',
@@ -1684,13 +1554,13 @@ const key = 'c'`
       end: code.indexOf('const key'),
       value: '\n// this is a comment\n',
     }
-    const { nonCodeMeta } = abstractSyntaxTree(lexer(code))
+    const { nonCodeMeta } = parser_wasm(code)
     expect(nonCodeMeta.noneCodeNodes[0]).toEqual(nonCodeMetaInstance)

     // extra whitespace won't change it's position (0) or value (NB the start end would have changed though)
     const codeWithExtraStartWhitespace = '\n\n\n' + code
-    const { nonCodeMeta: nonCodeMeta2 } = abstractSyntaxTree(
-      lexer(codeWithExtraStartWhitespace)
+    const { nonCodeMeta: nonCodeMeta2 } = parser_wasm(
+      codeWithExtraStartWhitespace
     )
     expect(nonCodeMeta2.noneCodeNodes[0].value).toBe(nonCodeMetaInstance.value)
     expect(nonCodeMeta2.noneCodeNodes[0].start).not.toBe(
@@ -1707,7 +1577,7 @@ const key = 'c'`
   |> close(%)
 `

-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     const indexOfSecondLineToExpression = 2
     const sketchNonCodeMeta = (body as any)[0].declarations[0].init.nonCodeMeta
       .noneCodeNodes
@@ -1729,7 +1599,7 @@ const key = 'c'`
       ' |> rx(90, %)',
     ].join('\n')

-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     const sketchNonCodeMeta = (body[0] as any).declarations[0].init.nonCodeMeta
       .noneCodeNodes
     expect(sketchNonCodeMeta[3]).toEqual({
@@ -1741,72 +1611,10 @@ const key = 'c'`
   })
 })

-describe('testing findEndofBinaryExpression', () => {
-  it('1 + 2 * 3', () => {
-    const code = `1 + 2 * 3\nconst yo = 5`
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].value).toBe('3')
-  })
-  it('(1 + 2) / 5 - 3', () => {
-    const code = `(1 + 25) / 5 - 3\nconst yo = 5`
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].value).toBe('3')
-
-    // expect to have the same end if started later in the string at a legitimate place
-    const indexOf5 = code.indexOf('5')
-    const endStartingAtThe5 = findEndOfBinaryExpression(tokens, indexOf5)
-    expect(endStartingAtThe5).toBe(end)
-  })
-  it('whole thing wraped: ((1 + 2) / 5 - 3)', () => {
-    const code = '((1 + 2) / 5 - 3)\nconst yo = 5'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].end).toBe(code.indexOf('3)') + 2)
-  })
-  it('whole thing wraped but given index after the first brace: ((1 + 2) / 5 - 3)', () => {
-    const code = '((1 + 2) / 5 - 3)\nconst yo = 5'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 1)
-    expect(tokens[end].value).toBe('3')
-  })
-  it('given the index of a small wrapped section i.e. `1 + 2` in ((1 + 2) / 5 - 3)', () => {
-    const code = '((1 + 2) / 5 - 3)\nconst yo = 5'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 2)
-    expect(tokens[end].value).toBe('2')
-  })
-  it('lots of silly nesting: (1 + 2) / (5 - (3))', () => {
-    const code = '(1 + 2) / (5 - (3))\nconst yo = 5'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].end).toBe(code.indexOf('))') + 2)
-  })
-  it('with pipe operator at the end', () => {
-    const code = '(1 + 2) / (5 - (3))\n |> fn(%)'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].end).toBe(code.indexOf('))') + 2)
-  })
-  it('with call expression at the start of binary expression', () => {
-    const code = 'yo(2) + 3\n |> fn(%)'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].value).toBe('3')
-  })
-  it('with call expression at the end of binary expression', () => {
-    const code = '3 + yo(2)\n |> fn(%)'
-    const tokens = lexer(code)
-    const end = findEndOfBinaryExpression(tokens, 0)
-    expect(tokens[end].value).toBe(')')
-  })
-})
-
 describe('test UnaryExpression', () => {
   it('should parse a unary expression in simple var dec situation', () => {
     const code = `const myVar = -min(4, 100)`
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     const myVarInit = (body?.[0] as any).declarations[0]?.init
     expect(myVarInit).toEqual({
       type: 'UnaryExpression',
@@ -1831,7 +1639,7 @@ describe('test UnaryExpression', () => {
 describe('testing nested call expressions', () => {
   it('callExp in a binExp in a callExp', () => {
     const code = 'const myVar = min(100, 1 + legLen(5, 3))'
-    const { body } = abstractSyntaxTree(lexer(code))
+    const { body } = parser_wasm(code)
     const myVarInit = (body?.[0] as any).declarations[0]?.init
     expect(myVarInit).toEqual({
       type: 'CallExpression',
||||||
@ -1867,8 +1675,7 @@ describe('testing nested call expressions', () => {
|
|||||||
describe('should recognise callExpresions in binaryExpressions', () => {
|
describe('should recognise callExpresions in binaryExpressions', () => {
|
||||||
const code = "xLineTo(segEndX('seg02', %) + 1, %)"
|
const code = "xLineTo(segEndX('seg02', %) + 1, %)"
|
||||||
it('should recognise the callExp', () => {
|
it('should recognise the callExp', () => {
|
||||||
const tokens = lexer(code)
|
const { body } = parser_wasm(code)
|
||||||
const { body } = abstractSyntaxTree(tokens)
|
|
||||||
const callExpArgs = (body?.[0] as any).expression?.arguments
|
const callExpArgs = (body?.[0] as any).expression?.arguments
|
||||||
expect(callExpArgs).toEqual([
|
expect(callExpArgs).toEqual([
|
||||||
{
|
{
|
||||||
(File diff suppressed because it is too large.)
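The suppressed diff is plausibly the rewritten abstractSyntaxTree module itself, since every other file in this commit now imports parser_wasm from it. A hypothetical sketch of what such a wrapper could look like — everything except the parser_wasm/asyncParser/initPromise names and the standard wasm-bindgen init pattern is an assumption, not taken from the PR:

    // Hypothetical wrapper sketch; the real file's contents are suppressed above.
    import init, { parse_js } from '../../wasm-lib/pkg' // assumed wasm-bindgen output; name and path are guesses
    import { Program } from './abstractSyntaxTreeTypes'

    export const initPromise = init() // in the real code this lives in ./rust

    export function parser_wasm(code: string): Program {
      // the commit log mentions replacing panics with Results, so the real
      // wrapper presumably maps Rust-side errors to KCL syntax errors here
      return parse_js(code) as Program
    }

    export async function asyncParser(code: string): Promise<Program> {
      await initPromise // ensure the wasm module has loaded before parsing
      return parser_wasm(code)
    }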
@@ -1,6 +1,4 @@
-import { abstractSyntaxTree } from './abstractSyntaxTree'
-import { lexer } from './tokeniser'
-import { SketchGroup, ExtrudeGroup } from './executor'
+import { parser_wasm } from './abstractSyntaxTree'
 import { initPromise } from './rust'
 import { enginelessExecutor, executor } from '../lib/testHelpers'

@@ -15,9 +13,7 @@ const mySketch001 = startSketchAt([0, 0])
   |> lineTo([0.46, -5.82], %)
   // |> rx(45, %)
 show(mySketch001)`
-  const programMemory = await enginelessExecutor(
-    abstractSyntaxTree(lexer(code))
-  )
+  const programMemory = await enginelessExecutor(parser_wasm(code))
   const shown = programMemory?.return?.map(
     (a) => programMemory?.root?.[a.name]
   )
@@ -72,9 +68,7 @@ const mySketch001 = startSketchAt([0, 0])
   // |> rx(45, %)
   |> extrude(2, %)
 show(mySketch001)`
-  const programMemory = await enginelessExecutor(
-    abstractSyntaxTree(lexer(code))
-  )
+  const programMemory = await enginelessExecutor(parser_wasm(code))
   const shown = programMemory?.return?.map(
     (a) => programMemory?.root?.[a.name]
   )
@@ -115,9 +109,7 @@ const sk2 = startSketchAt([0, 0])


 show(theExtrude, sk2)`
-  const programMemory = await enginelessExecutor(
-    abstractSyntaxTree(lexer(code))
-  )
+  const programMemory = await enginelessExecutor(parser_wasm(code))
   const geos = programMemory?.return?.map(
     ({ name }) => programMemory?.root?.[name]
   )
(deleted file)
@@ -1,207 +0,0 @@
-import { parseExpression, reversePolishNotation } from './astMathExpressions'
-import { lexer } from './tokeniser'
-import { initPromise } from './rust'
-
-beforeAll(() => initPromise)
-
-describe('parseExpression', () => {
-  it('parses a simple expression', () => {
-    const result = parseExpression(lexer('1 + 2'))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '+',
-      start: 0,
-      end: 5,
-      left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-      right: { type: 'Literal', value: 2, raw: '2', start: 4, end: 5 },
-    })
-  })
-  it('parses a more complex expression + followed by *', () => {
-    const tokens = lexer('1 + 2 * 3')
-    const result = parseExpression(tokens)
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '+',
-      start: 0,
-      end: 9,
-      left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-      right: {
-        type: 'BinaryExpression',
-        operator: '*',
-        start: 4,
-        end: 9,
-        left: { type: 'Literal', value: 2, raw: '2', start: 4, end: 5 },
-        right: { type: 'Literal', value: 3, raw: '3', start: 8, end: 9 },
-      },
-    })
-  })
-  it('parses a more complex expression with parentheses: 1 * ( 2 + 3 )', () => {
-    const result = parseExpression(lexer('1 * ( 2 + 3 )'))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '*',
-      start: 0,
-      end: 13,
-      left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-      right: {
-        type: 'BinaryExpression',
-        operator: '+',
-        start: 6,
-        end: 11,
-        left: { type: 'Literal', value: 2, raw: '2', start: 6, end: 7 },
-        right: { type: 'Literal', value: 3, raw: '3', start: 10, end: 11 },
-      },
-    })
-  })
-  it('parses a more complex expression with parentheses with more', () => {
-    const result = parseExpression(lexer('1 * ( 2 + 3 ) / 4'))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '/',
-      start: 0,
-      end: 17,
-      left: {
-        type: 'BinaryExpression',
-        operator: '*',
-        start: 0,
-        end: 13,
-        left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-        right: {
-          type: 'BinaryExpression',
-          operator: '+',
-          start: 6,
-          end: 11,
-          left: { type: 'Literal', value: 2, raw: '2', start: 6, end: 7 },
-          right: { type: 'Literal', value: 3, raw: '3', start: 10, end: 11 },
-        },
-      },
-      right: { type: 'Literal', value: 4, raw: '4', start: 16, end: 17 },
-    })
-  })
-  it('same as last one but with a 1 + at the start', () => {
-    const result = parseExpression(lexer('1 + ( 2 + 3 ) / 4'))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '+',
-      start: 0,
-      end: 17,
-      left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-      right: {
-        type: 'BinaryExpression',
-        operator: '/',
-        start: 4,
-        end: 17,
-        left: {
-          type: 'BinaryExpression',
-          operator: '+',
-          start: 6,
-          end: 11,
-          left: { type: 'Literal', value: 2, raw: '2', start: 6, end: 7 },
-          right: { type: 'Literal', value: 3, raw: '3', start: 10, end: 11 },
-        },
-        right: { type: 'Literal', value: 4, raw: '4', start: 16, end: 17 },
-      },
-    })
-  })
-  it('nested braces', () => {
-    const result = parseExpression(lexer('1 * (( 2 + 3 ) / 4 + 5 )'))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '*',
-      start: 0,
-      end: 24,
-      left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-      right: {
-        type: 'BinaryExpression',
-        operator: '+',
-        start: 5,
-        end: 22,
-        left: {
-          type: 'BinaryExpression',
-          operator: '/',
-          start: 5,
-          end: 18,
-          left: {
-            type: 'BinaryExpression',
-            operator: '+',
-            start: 7,
-            end: 12,
-            left: { type: 'Literal', value: 2, raw: '2', start: 7, end: 8 },
-            right: {
-              type: 'Literal',
-              value: 3,
-              raw: '3',
-              start: 11,
-              end: 12,
-            },
-          },
-          right: { type: 'Literal', value: 4, raw: '4', start: 17, end: 18 },
-        },
-        right: { type: 'Literal', value: 5, raw: '5', start: 21, end: 22 },
-      },
-    })
-  })
-  it('multiple braces around the same thing', () => {
-    const result = parseExpression(lexer('1 * ((( 2 + 3 )))'))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '*',
-      start: 0,
-      end: 17,
-      left: { type: 'Literal', value: 1, raw: '1', start: 0, end: 1 },
-      right: {
-        type: 'BinaryExpression',
-        operator: '+',
-        start: 8,
-        end: 13,
-        left: { type: 'Literal', value: 2, raw: '2', start: 8, end: 9 },
-        right: { type: 'Literal', value: 3, raw: '3', start: 12, end: 13 },
-      },
-    })
-  })
-  it('multiple braces around a sing literal', () => {
-    const code = '2 + (((3)))'
-    const result = parseExpression(lexer(code))
-    expect(result).toEqual({
-      type: 'BinaryExpression',
-      operator: '+',
-      start: 0,
-      end: code.indexOf(')))') + 3,
-      left: { type: 'Literal', value: 2, raw: '2', start: 0, end: 1 },
-      right: { type: 'Literal', value: 3, raw: '3', start: 7, end: 8 },
-    })
-  })
-})
-
-describe('reversePolishNotation', () => {
-  it('converts a simple expression', () => {
-    const result = reversePolishNotation(lexer('1 + 2'))
-    expect(result).toEqual([
-      { type: 'number', value: '1', start: 0, end: 1 },
-      { type: 'number', value: '2', start: 4, end: 5 },
-      { type: 'operator', value: '+', start: 2, end: 3 },
-    ])
-  })
-  it('converts a more complex expression', () => {
-    const result = reversePolishNotation(lexer('1 + 2 * 3'))
-    expect(result).toEqual([
-      { type: 'number', value: '1', start: 0, end: 1 },
-      { type: 'number', value: '2', start: 4, end: 5 },
-      { type: 'number', value: '3', start: 8, end: 9 },
-      { type: 'operator', value: '*', start: 6, end: 7 },
-      { type: 'operator', value: '+', start: 2, end: 3 },
-    ])
-  })
-  it('converts a more complex expression with parentheses', () => {
-    const result = reversePolishNotation(lexer('1 * ( 2 + 3 )'))
-    expect(result).toEqual([
-      { type: 'number', value: '1', start: 0, end: 1 },
-      { type: 'brace', value: '(', start: 4, end: 5 },
-      { type: 'number', value: '2', start: 6, end: 7 },
-      { type: 'number', value: '3', start: 10, end: 11 },
-      { type: 'operator', value: '+', start: 8, end: 9 },
-      { type: 'brace', value: ')', start: 12, end: 13 },
-      { type: 'operator', value: '*', start: 2, end: 3 },
-    ])
-  })
-})
@ -1,253 +0,0 @@
|
|||||||
import {
|
|
||||||
BinaryExpression,
|
|
||||||
Literal,
|
|
||||||
Identifier,
|
|
||||||
CallExpression,
|
|
||||||
} from './abstractSyntaxTreeTypes'
|
|
||||||
import {
|
|
||||||
findClosingBrace,
|
|
||||||
makeCallExpression,
|
|
||||||
isNotCodeToken,
|
|
||||||
} from './abstractSyntaxTree'
|
|
||||||
import { Token } from './tokeniser'
|
|
||||||
import { KCLSyntaxError } from './errors'
|
|
||||||
|
|
||||||
export function reversePolishNotation(
|
|
||||||
tokens: Token[],
|
|
||||||
previousPostfix: Token[] = [],
|
|
||||||
operators: Token[] = []
|
|
||||||
): Token[] {
|
|
||||||
if (tokens.length === 0) {
|
|
||||||
return [...previousPostfix, ...operators.slice().reverse()] // reverse mutates, so slice/clone is needed
|
|
||||||
}
|
|
||||||
const currentToken = tokens[0]
|
|
||||||
if (
|
|
||||||
currentToken.type === 'word' &&
|
|
||||||
tokens?.[1]?.type === 'brace' &&
|
|
||||||
tokens?.[1]?.value === '('
|
|
||||||
) {
|
|
||||||
const closingBrace = findClosingBrace(tokens, 1)
|
|
||||||
return reversePolishNotation(
|
|
||||||
tokens.slice(closingBrace + 1),
|
|
||||||
[...previousPostfix, ...tokens.slice(0, closingBrace + 1)],
|
|
||||||
operators
|
|
||||||
)
|
|
||||||
} else if (
|
|
||||||
currentToken.type === 'number' ||
|
|
||||||
currentToken.type === 'word' ||
|
|
||||||
currentToken.type === 'string'
|
|
||||||
) {
|
|
||||||
return reversePolishNotation(
|
|
||||||
tokens.slice(1),
|
|
||||||
[...previousPostfix, currentToken],
|
|
||||||
operators
|
|
||||||
)
|
|
||||||
} else if (['+', '-', '*', '/', '%'].includes(currentToken.value)) {
|
|
||||||
if (
|
|
||||||
operators.length > 0 &&
|
|
||||||
_precedence(operators[operators.length - 1]) >= _precedence(currentToken)
|
|
||||||
) {
|
|
||||||
return reversePolishNotation(
|
|
||||||
tokens,
|
|
||||||
[...previousPostfix, operators[operators.length - 1]],
|
|
||||||
operators.slice(0, -1)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return reversePolishNotation(tokens.slice(1), previousPostfix, [
|
|
||||||
...operators,
|
|
||||||
currentToken,
|
|
||||||
])
|
|
||||||
} else if (currentToken.value === '(') {
|
|
||||||
// push current token to both stacks as it is a legitimate operator
|
|
||||||
// but later we'll need to pop other operators off the stack until we find the matching ')'
|
|
||||||
return reversePolishNotation(
|
|
||||||
tokens.slice(1),
|
|
||||||
[...previousPostfix, currentToken],
|
|
||||||
[...operators, currentToken]
|
|
||||||
)
|
|
||||||
} else if (currentToken.value === ')') {
|
|
||||||
if (operators[operators.length - 1]?.value !== '(') {
|
|
||||||
// pop operators off the stack and push them to postFix until we find the matching '('
|
|
||||||
return reversePolishNotation(
|
|
||||||
tokens,
|
|
||||||
[...previousPostfix, operators[operators.length - 1]],
|
|
||||||
operators.slice(0, -1)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return reversePolishNotation(
|
|
||||||
tokens.slice(1),
|
|
||||||
[...previousPostfix, currentToken],
|
|
||||||
operators.slice(0, -1)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
if (isNotCodeToken(currentToken)) {
|
|
    return reversePolishNotation(tokens.slice(1), previousPostfix, operators)
  }
  throw new KCLSyntaxError('Unknown token', [
    [currentToken.start, currentToken.end],
  ])
}

interface ParenthesisToken {
  type: 'parenthesis'
  value: '(' | ')'
  start: number
  end: number
}

interface ExtendedBinaryExpression extends BinaryExpression {
  startExtended?: number
  endExtended?: number
}

const buildTree = (
  reversePolishNotationTokens: Token[],
  stack: (
    | ExtendedBinaryExpression
    | Literal
    | Identifier
    | ParenthesisToken
    | CallExpression
  )[] = []
): BinaryExpression => {
  if (reversePolishNotationTokens.length === 0) {
    return stack[0] as BinaryExpression
  }
  const currentToken = reversePolishNotationTokens[0]
  if (currentToken.type === 'number' || currentToken.type === 'string') {
    return buildTree(reversePolishNotationTokens.slice(1), [
      ...stack,
      {
        type: 'Literal',
        value:
          currentToken.type === 'number'
            ? Number(currentToken.value)
            : currentToken.value.slice(1, -1),
        raw: currentToken.value,
        start: currentToken.start,
        end: currentToken.end,
      },
    ])
  } else if (currentToken.type === 'word') {
    if (
      reversePolishNotationTokens?.[1]?.type === 'brace' &&
      reversePolishNotationTokens?.[1]?.value === '('
    ) {
      const closingBrace = findClosingBrace(reversePolishNotationTokens, 1)
      return buildTree(reversePolishNotationTokens.slice(closingBrace + 1), [
        ...stack,
        makeCallExpression(reversePolishNotationTokens, 0).expression,
      ])
    }
    return buildTree(reversePolishNotationTokens.slice(1), [
      ...stack,
      {
        type: 'Identifier',
        name: currentToken.value,
        start: currentToken.start,
        end: currentToken.end,
      },
    ])
  } else if (currentToken.type === 'brace' && currentToken.value === '(') {
    const paranToken: ParenthesisToken = {
      type: 'parenthesis',
      value: '(',
      start: currentToken.start,
      end: currentToken.end,
    }
    return buildTree(reversePolishNotationTokens.slice(1), [
      ...stack,
      paranToken,
    ])
  } else if (currentToken.type === 'brace' && currentToken.value === ')') {
    const innerNode = stack[stack.length - 1]
    const paran = stack[stack.length - 2]
    const binExp: ExtendedBinaryExpression = {
      ...innerNode,
      startExtended: paran.start,
      endExtended: currentToken.end,
    } as ExtendedBinaryExpression
    return buildTree(reversePolishNotationTokens.slice(1), [
      ...stack.slice(0, -2),
      binExp,
    ])
  }

  const left = { ...stack[stack.length - 2] }
  let start = left.start
  if (left.type === 'BinaryExpression') {
    start = left?.startExtended || left.start
    delete left.startExtended
    delete left.endExtended
  }

  const right = { ...stack[stack.length - 1] }
  let end = right.end
  if (right.type === 'BinaryExpression') {
    end = right?.endExtended || right.end
    delete right.startExtended
    delete right.endExtended
  }

  const binExp: BinaryExpression = {
    type: 'BinaryExpression',
    operator: currentToken.value,
    start,
    end,
    left: left as any,
    right: right as any,
  }
  return buildTree(reversePolishNotationTokens.slice(1), [
    ...stack.slice(0, -2),
    binExp,
  ])
}

export function parseExpression(tokens: Token[]): BinaryExpression {
  const treeWithMaybeBadTopLevelStartEnd = buildTree(
    reversePolishNotation(tokens)
  )
  const left = treeWithMaybeBadTopLevelStartEnd?.left as any
  const start = left?.startExtended || treeWithMaybeBadTopLevelStartEnd?.start
  if (left == undefined || left == null) {
    throw new KCLSyntaxError(
      'syntax',
      tokens.map((token) => [token.start, token.end])
    ) // Add text
  }
  delete left.startExtended
  delete left.endExtended

  const right = treeWithMaybeBadTopLevelStartEnd?.right as any
  const end = right?.endExtended || treeWithMaybeBadTopLevelStartEnd?.end
  delete right.startExtended
  delete right.endExtended

  const tree: BinaryExpression = {
    ...treeWithMaybeBadTopLevelStartEnd,
    start,
    end,
    left,
    right,
  }
  return tree
}

function _precedence(operator: Token): number {
  return precedence(operator.value)
}

export function precedence(operator: string): number {
  // might be useful for reference to make it match
  // another commonly used lang https://www.w3schools.com/js/js_precedence.asp
  if (['+', '-'].includes(operator)) {
    return 11
  } else if (['*', '/', '%'].includes(operator)) {
    return 12
  } else {
    return 0
  }
}
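For context on what this deleted file did: the old TypeScript expression parser worked in two passes, first converting the token stream to reverse Polish notation using the precedence table above, then folding that RPN list back into a BinaryExpression tree via buildTree. A minimal sketch of the first pass under those assumptions (simplified token shape and helper names are illustrative, not the shipped API):

// Minimal shunting-yard sketch mirroring the removed parser's approach.
// `Tok` is a simplified stand-in for the real Token interface.
type Tok = { type: 'number' | 'operator'; value: string }

const prec = (op: string): number =>
  ['+', '-'].includes(op) ? 11 : ['*', '/', '%'].includes(op) ? 12 : 0

// toRpn([1, +, 2, *, 3]) => ['1', '2', '3', '*', '+'],
// so '*' binds tighter than '+' when the tree is rebuilt.
function toRpn(tokens: Tok[]): string[] {
  const out: string[] = []
  const ops: string[] = []
  for (const t of tokens) {
    if (t.type === 'number') out.push(t.value)
    else {
      // Pop operators of equal or higher precedence before pushing.
      while (ops.length && prec(ops[ops.length - 1]) >= prec(t.value))
        out.push(ops.pop() as string)
      ops.push(t.value)
    }
  }
  return [...out, ...ops.reverse()]
}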
@@ -1,8 +1,7 @@
 import fs from 'node:fs'
 
-import { abstractSyntaxTree } from './abstractSyntaxTree'
-import { lexer } from './tokeniser'
-import { ProgramMemory, Path, SketchGroup } from './executor'
+import { parser_wasm } from './abstractSyntaxTree'
+import { ProgramMemory } from './executor'
 import { initPromise } from './rust'
 import { enginelessExecutor } from '../lib/testHelpers'
 import { vi } from 'vitest'
@@ -47,7 +46,7 @@ log(5, myVar)`
     ],
   },
 }
-const { root } = await enginelessExecutor(abstractSyntaxTree(lexer(code)), {
+const { root } = await enginelessExecutor(parser_wasm(code), {
   root: programMemoryOverride,
   pendingMemory: {},
 })
@@ -463,8 +462,7 @@ async function exe(
   code: string,
   programMemory: ProgramMemory = { root: {}, pendingMemory: {} }
 ) {
-  const tokens = lexer(code)
-  const ast = abstractSyntaxTree(tokens)
+  const ast = parser_wasm(code)
 
   const result = await enginelessExecutor(ast, programMemory)
   return result
@@ -1,6 +1,5 @@
 import { getNodePathFromSourceRange, getNodeFromPath } from './queryAst'
-import { lexer } from './tokeniser'
-import { abstractSyntaxTree } from './abstractSyntaxTree'
+import { parser_wasm } from './abstractSyntaxTree'
 import { initPromise } from './rust'
 
 beforeAll(() => initPromise)
@@ -21,7 +20,7 @@ const sk3 = startSketchAt([0, 0])
     lineToSubstringIndex + subStr.length,
   ]
 
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const nodePath = getNodePathFromSourceRange(ast, sourceRange)
   const { node } = getNodeFromPath<any>(ast, nodePath)
 
@@ -1,4 +1,4 @@
-import { abstractSyntaxTree } from './abstractSyntaxTree'
+import { parser_wasm } from './abstractSyntaxTree'
 import {
   createLiteral,
   createIdentifier,
@@ -14,7 +14,6 @@ import {
   moveValueIntoNewVariable,
 } from './modifyAst'
 import { recast } from './recast'
-import { lexer } from './tokeniser'
 import { initPromise } from './rust'
 import { enginelessExecutor } from '../lib/testHelpers'
 
@@ -127,7 +126,7 @@ function giveSketchFnCallTagTestHelper(
   // giveSketchFnCallTag inputs and outputs an ast, which is very verbose for testing
   // this wrapper changes the input and output to code
   // making it more of an integration test, but easier to read the test intention is the goal
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const start = code.indexOf(searchStr)
   const range: [number, number] = [start, start + searchStr.length]
   const { modifiedAst, tag, isTagExisting } = giveSketchFnCallTag(ast, range)
@@ -195,7 +194,7 @@ const yo = 5 + 6
 const yo2 = hmm([identifierGuy + 5])
 show(part001)`
 it('should move a binary expression into a new variable', async () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const programMemory = await enginelessExecutor(ast)
   const startIndex = code.indexOf('100 + 100') + 1
   const { modifiedAst } = moveValueIntoNewVariable(
@@ -209,7 +208,7 @@ show(part001)`
   expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
 })
 it('should move a value into a new variable', async () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const programMemory = await enginelessExecutor(ast)
   const startIndex = code.indexOf('2.8') + 1
   const { modifiedAst } = moveValueIntoNewVariable(
@@ -223,7 +222,7 @@ show(part001)`
   expect(newCode).toContain(`line([newVar, 0], %)`)
 })
 it('should move a callExpression into a new variable', async () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const programMemory = await enginelessExecutor(ast)
   const startIndex = code.indexOf('def(')
   const { modifiedAst } = moveValueIntoNewVariable(
@@ -237,7 +236,7 @@ show(part001)`
   expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
 })
 it('should move a binary expression with call expression into a new variable', async () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const programMemory = await enginelessExecutor(ast)
   const startIndex = code.indexOf('jkl(') + 1
   const { modifiedAst } = moveValueIntoNewVariable(
@@ -251,7 +250,7 @@ show(part001)`
   expect(newCode).toContain(`angledLine([newVar, 3.09], %)`)
 })
 it('should move a identifier into a new variable', async () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const programMemory = await enginelessExecutor(ast)
   const startIndex = code.indexOf('identifierGuy +') + 1
   const { modifiedAst } = moveValueIntoNewVariable(
@@ -1,4 +1,4 @@
-import { abstractSyntaxTree } from './abstractSyntaxTree'
+import { parser_wasm } from './abstractSyntaxTree'
 import {
   findAllPreviousVariables,
   isNodeSafeToReplace,
@@ -37,7 +37,7 @@ const variableBelowShouldNotBeIncluded = 3
 
 show(part001)`
 const rangeStart = code.indexOf('// selection-range-7ish-before-this') - 7
-const ast = abstractSyntaxTree(lexer(code))
+const ast = parser_wasm(code)
 const programMemory = await enginelessExecutor(ast)
 
 const { variables, bodyPath, insertIndex } = findAllPreviousVariables(
@@ -71,7 +71,7 @@ const yo = 5 + 6
 const yo2 = hmm([identifierGuy + 5])
 show(part001)`
 it('find a safe binaryExpression', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('100 + 100') + 2
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(true)
@@ -85,7 +85,7 @@ show(part001)`
   expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
 })
 it('find a safe Identifier', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('abc')
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(true)
@@ -93,7 +93,7 @@ show(part001)`
   expect(code.slice(result.value.start, result.value.end)).toBe('abc')
 })
 it('find a safe CallExpression', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('def')
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(true)
@@ -107,7 +107,7 @@ show(part001)`
   expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
 })
 it('find an UNsafe CallExpression, as it has a PipeSubstitution', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('ghi')
   const range: [number, number] = [rangeStart, rangeStart]
   const result = isNodeSafeToReplace(ast, range)
@@ -116,7 +116,7 @@ show(part001)`
   expect(code.slice(result.value.start, result.value.end)).toBe('ghi(%)')
 })
 it('find an UNsafe Identifier, as it is a callee', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('ine([2.8,')
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(false)
@@ -126,7 +126,7 @@ show(part001)`
   )
 })
 it("find a safe BinaryExpression that's assigned to a variable", () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('5 + 6') + 1
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(true)
@@ -140,7 +140,7 @@ show(part001)`
   expect(outCode).toContain(`const yo = replaceName`)
 })
 it('find a safe BinaryExpression that has a CallExpression within', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('jkl') + 1
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(true)
@@ -156,7 +156,7 @@ show(part001)`
   expect(outCode).toContain(`angledLine([replaceName, 3.09], %)`)
 })
 it('find a safe BinaryExpression within a CallExpression', () => {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const rangeStart = code.indexOf('identifierGuy') + 1
   const result = isNodeSafeToReplace(ast, [rangeStart, rangeStart])
   expect(result.isSafe).toBe(true)
@@ -204,7 +204,7 @@ show(part001)`
 it('finds the second line when cursor is put at the end', () => {
   const searchLn = `line([0.94, 2.61], %)`
   const sourceIndex = code.indexOf(searchLn) + searchLn.length
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const result = getNodePathFromSourceRange(ast, [sourceIndex, sourceIndex])
   expect(result).toEqual([
     ['body', ''],
@@ -219,7 +219,7 @@ show(part001)`
 it('finds the last line when cursor is put at the end', () => {
   const searchLn = `line([-0.21, -1.4], %)`
   const sourceIndex = code.indexOf(searchLn) + searchLn.length
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const result = getNodePathFromSourceRange(ast, [sourceIndex, sourceIndex])
   const expected = [
     ['body', ''],
@@ -1,7 +1,6 @@
 import { recast } from './recast'
-import { abstractSyntaxTree } from './abstractSyntaxTree'
+import { parser_wasm } from './abstractSyntaxTree'
 import { Program } from './abstractSyntaxTreeTypes'
-import { lexer, Token } from './tokeniser'
 import fs from 'node:fs'
 import { initPromise } from './rust'
 
@@ -342,11 +341,7 @@ describe('it recasts binary expression using brackets where needed', () => {
 
 // helpers
 
-function code2ast(code: string): { ast: Program; tokens: Token[] } {
-  const tokens = lexer(code)
-  const ast = abstractSyntaxTree(tokens)
-  return {
-    ast,
-    tokens,
-  }
+function code2ast(code: string): { ast: Program } {
+  const ast = parser_wasm(code)
+  return { ast }
 }
@@ -1,4 +1,13 @@
 import { Program } from './abstractSyntaxTreeTypes'
 import { recast_js } from '../wasm-lib/pkg/wasm_lib'
 
-export const recast = (ast: Program): string => recast_js(JSON.stringify(ast))
+export const recast = (ast: Program): string => {
+  try {
+    const s: string = recast_js(JSON.stringify(ast))
+    return s
+  } catch (e) {
+    // TODO: do something real with the error.
+    console.log('recast', e)
+    throw e
+  }
+}
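With both parse and recast now going through the WASM module, a quick sanity check of the wrapper is a parse/re-print round trip. A usage sketch under the assumption that the module has been initialized via initPromise first:

import { parser_wasm } from './abstractSyntaxTree'
import { recast } from './recast'
import { initPromise } from './rust'

// Parse KCL source to an AST and print it back out; running the
// round trip twice should produce identical output (a fixpoint).
async function roundTrip(code: string): Promise<string> {
  await initPromise // ensure the WASM module is loaded first
  return recast(parser_wasm(code))
}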
@@ -5,8 +5,7 @@ import {
   getYComponent,
   getXComponent,
 } from './sketch'
-import { lexer } from '../tokeniser'
-import { abstractSyntaxTree } from '../abstractSyntaxTree'
+import { parser_wasm } from '../abstractSyntaxTree'
 import { getNodePathFromSourceRange } from '../queryAst'
 import { recast } from '../recast'
 import { enginelessExecutor } from '../../lib/testHelpers'
@@ -106,7 +105,7 @@ const mySketch001 = startSketchAt([0, 0])
 show(mySketch001)`
 const code = genCode(lineToChange)
 const expectedCode = genCode(lineAfterChange)
-const ast = abstractSyntaxTree(lexer(code))
+const ast = parser_wasm(code)
 const programMemory = await enginelessExecutor(ast)
 const sourceStart = code.indexOf(lineToChange)
 const { modifiedAst } = changeSketchArguments(
@@ -144,7 +143,7 @@ const mySketch001 = startSketchAt([0, 0])
   |> lineTo([-1.59, -1.54], %)
   |> lineTo([0.46, -5.82], %)
 show(mySketch001)`
-const ast = abstractSyntaxTree(lexer(code))
+const ast = parser_wasm(code)
 const programMemory = await enginelessExecutor(ast)
 const sourceStart = code.indexOf(lineToChange)
 const { modifiedAst } = addNewSketchLn({
@@ -183,7 +182,7 @@ describe('testing addTagForSketchOnFace', () => {
   |> lineTo([0.46, -5.82], %)
 show(mySketch001)`
 const code = genCode(originalLine)
-const ast = abstractSyntaxTree(lexer(code))
+const ast = parser_wasm(code)
 const programMemory = await enginelessExecutor(ast)
 const sourceStart = code.indexOf(originalLine)
 const sourceRange: [number, number] = [
@@ -1,6 +1,5 @@
-import { abstractSyntaxTree } from '../abstractSyntaxTree'
+import { parser_wasm } from '../abstractSyntaxTree'
 import { SketchGroup } from '../executor'
-import { lexer } from '../tokeniser'
 import {
   ConstraintType,
   getTransformInfos,
@@ -32,8 +31,7 @@ async function testingSwapSketchFnCall({
     type: 'default',
     range: [startIndex, startIndex + callToSwap.length],
   }
-  const tokens = lexer(inputCode)
-  const ast = abstractSyntaxTree(tokens)
+  const ast = parser_wasm(inputCode)
   const programMemory = await enginelessExecutor(ast)
   const selections = {
     codeBasedSelections: [range],
@@ -383,9 +381,7 @@ const part001 = startSketchAt([0, 0.04]) // segment-in-start
   |> xLine(3.54, %)
 show(part001)`
 it('normal case works', async () => {
-  const programMemory = await enginelessExecutor(
-    abstractSyntaxTree(lexer(code))
-  )
+  const programMemory = await enginelessExecutor(parser_wasm(code))
   const index = code.indexOf('// normal-segment') - 7
   const { __geoMeta, ...segment } = getSketchSegmentFromSourceRange(
     programMemory.root['part001'] as SketchGroup,
@@ -398,9 +394,7 @@ show(part001)`
   })
 })
 it('verify it works when the segment is in the `start` property', async () => {
-  const programMemory = await enginelessExecutor(
-    abstractSyntaxTree(lexer(code))
-  )
+  const programMemory = await enginelessExecutor(parser_wasm(code))
   const index = code.indexOf('// segment-in-start') - 7
   const { __geoMeta, ...segment } = getSketchSegmentFromSourceRange(
     programMemory.root['part001'] as SketchGroup,
@@ -1,6 +1,5 @@
-import { abstractSyntaxTree } from '../abstractSyntaxTree'
+import { parser_wasm } from '../abstractSyntaxTree'
 import { Value } from '../abstractSyntaxTreeTypes'
-import { lexer } from '../tokeniser'
 import {
   getConstraintType,
   getTransformInfos,
@@ -64,7 +63,7 @@ describe('testing getConstraintType', () => {
 function getConstraintTypeFromSourceHelper(
   code: string
 ): ReturnType<typeof getConstraintType> {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const args = (ast.body[0] as any).expression.arguments[0].elements as [
     Value,
     Value
@@ -75,7 +74,7 @@ function getConstraintTypeFromSourceHelper(
 function getConstraintTypeFromSourceHelper2(
   code: string
 ): ReturnType<typeof getConstraintType> {
-  const ast = abstractSyntaxTree(lexer(code))
+  const ast = parser_wasm(code)
   const arg = (ast.body[0] as any).expression.arguments[0] as Value
   const fnName = (ast.body[0] as any).expression.callee.name as TooTip
   return getConstraintType(arg, fnName)
@@ -198,7 +197,7 @@ const part001 = startSketchAt([0, 0])
   |> yLine(segLen('seg01', %), %) // ln-yLineTo-free should convert to yLine
 show(part001)`
 it('should transform the ast', async () => {
-  const ast = abstractSyntaxTree(lexer(inputScript))
+  const ast = parser_wasm(inputScript)
   const selectionRanges: Selections['codeBasedSelections'] = inputScript
     .split('\n')
     .filter((ln) => ln.includes('//'))
@@ -283,7 +282,7 @@ const part001 = startSketchAt([0, 0])
   |> xLineTo(myVar3, %) // select for horizontal constraint 10
   |> angledLineToY([301, myVar], %) // select for vertical constraint 10
 show(part001)`
-const ast = abstractSyntaxTree(lexer(inputScript))
+const ast = parser_wasm(inputScript)
 const selectionRanges: Selections['codeBasedSelections'] = inputScript
   .split('\n')
   .filter((ln) => ln.includes('// select for horizontal constraint'))
@@ -340,7 +339,7 @@ const part001 = startSketchAt([0, 0])
   |> angledLineToX([333, myVar3], %) // select for horizontal constraint 10
   |> yLineTo(myVar, %) // select for vertical constraint 10
 show(part001)`
-const ast = abstractSyntaxTree(lexer(inputScript))
+const ast = parser_wasm(inputScript)
 const selectionRanges: Selections['codeBasedSelections'] = inputScript
   .split('\n')
   .filter((ln) => ln.includes('// select for vertical constraint'))
@@ -430,7 +429,7 @@ async function helperThing(
   linesOfInterest: string[],
   constraint: ConstraintType
 ): Promise<string> {
-  const ast = abstractSyntaxTree(lexer(inputScript))
+  const ast = parser_wasm(inputScript)
   const selectionRanges: Selections['codeBasedSelections'] = inputScript
     .split('\n')
     .filter((ln) =>
@@ -493,7 +492,7 @@ const part001 = startSketchAt([-0.01, -0.05])
   |> xLine(-3.43 + 0, %) // full
   |> angledLineOfXLength([243 + 0, 1.2 + 0], %) // full
 show(part001)`
-const ast = abstractSyntaxTree(lexer(code))
+const ast = parser_wasm(code)
 const constraintLevels: ReturnType<
   typeof getConstraintLevelFromSourceRange
 >[] = ['full', 'partial', 'free']
@@ -1,6 +1,5 @@
-import { abstractSyntaxTree } from '../abstractSyntaxTree'
+import { parser_wasm } from '../abstractSyntaxTree'
 import { enginelessExecutor } from '../../lib/testHelpers'
-import { lexer } from '../tokeniser'
 import { initPromise } from '../rust'
 
 beforeAll(() => initPromise)
@@ -18,13 +17,9 @@ describe('testing angledLineThatIntersects', () => {
       }, %)
 const intersect = segEndX('yo2', part001)
 show(part001)`
-    const { root } = await enginelessExecutor(
-      abstractSyntaxTree(lexer(code('-1')))
-    )
+    const { root } = await enginelessExecutor(parser_wasm(code('-1')))
     expect(root.intersect.value).toBe(1 + Math.sqrt(2))
-    const { root: noOffset } = await enginelessExecutor(
-      abstractSyntaxTree(lexer(code('0')))
-    )
+    const { root: noOffset } = await enginelessExecutor(parser_wasm(code('0')))
     expect(noOffset.intersect.value).toBeCloseTo(1)
   })
 })
@@ -21,9 +21,23 @@ export interface Token {
 
 export async function asyncLexer(str: string): Promise<Token[]> {
   await initPromise
-  return JSON.parse(lexer_js(str)) as Token[]
+  try {
+    const tokens: Token[] = lexer_js(str)
+    return tokens
+  } catch (e) {
+    // TODO: do something real with the error.
+    console.log('lexer', e)
+    throw e
+  }
 }
 
 export function lexer(str: string): Token[] {
-  return JSON.parse(lexer_js(str)) as Token[]
+  try {
+    const tokens: Token[] = lexer_js(str)
+    return tokens
+  } catch (e) {
+    // TODO: do something real with the error.
+    console.log('lexer', e)
+    throw e
+  }
 }
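Note the wrappers above now receive structured tokens straight from the wasm-bindgen call instead of JSON-parsing a string. A small usage sketch (token fields per the Token interface in this file):

import { asyncLexer, Token } from './tokeniser'

// Tokenise a snippet and report each token's source range.
async function dumpTokens(src: string): Promise<void> {
  const tokens: Token[] = await asyncLexer(src) // waits for WASM init internally
  for (const t of tokens) {
    console.log(`${t.type} '${t.value}' @ ${t.start}..${t.end}`)
  }
}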
@@ -39,6 +39,6 @@ export async function exportSave(data: ArrayBuffer) {
     }
   } catch (e) {
     // TODO: do something real with the error.
-    console.log(e)
+    console.log('export', e)
   }
 }
@@ -1,7 +1,7 @@
 import { create } from 'zustand'
 import { persist } from 'zustand/middleware'
 import { addLineHighlight, EditorView } from './editor/highlightextension'
-import { abstractSyntaxTree } from './lang/abstractSyntaxTree'
+import { parser_wasm } from './lang/abstractSyntaxTree'
 import { Program } from './lang/abstractSyntaxTreeTypes'
 import { getNodeFromPath } from './lang/queryAst'
 import {
@@ -321,9 +321,7 @@ export const useStore = create<StoreState>()(
     },
     updateAst: async (ast, { focusPath, callBack = () => {} } = {}) => {
       const newCode = recast(ast)
-      const astWithUpdatedSource = abstractSyntaxTree(
-        await asyncLexer(newCode)
-      )
+      const astWithUpdatedSource = parser_wasm(newCode)
       callBack(astWithUpdatedSource)
 
       set({ ast: astWithUpdatedSource, code: newCode })
@@ -361,7 +359,7 @@
     },
     formatCode: async () => {
       const code = get().code
-      const ast = abstractSyntaxTree(await asyncLexer(code))
+      const ast = parser_wasm(code)
       const newCode = recast(ast)
       set({ code: newCode, ast })
     },
src/wasm-lib/Cargo.lock (generated, 148 changes)
@@ -2,6 +2,21 @@
 # It is not intended for manual editing.
 version = 3
 
+[[package]]
+name = "addr2line"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
 [[package]]
 name = "aho-corasick"
 version = "0.7.20"
@@ -11,6 +26,27 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "anyhow"
+version = "1.0.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
+
+[[package]]
+name = "backtrace"
+version = "0.3.68"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
 [[package]]
 name = "bincode"
 version = "1.3.3"
@@ -26,18 +62,49 @@ version = "3.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
 
+[[package]]
+name = "cc"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
+
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
 [[package]]
 name = "fnv"
 version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
 
+[[package]]
+name = "gimli"
+version = "0.27.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
+
+[[package]]
+name = "gloo-utils"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa"
+dependencies = [
+ "js-sys",
+ "serde",
+ "serde_json",
+ "wasm-bindgen",
+ "web-sys",
+]
+
 [[package]]
 name = "itoa"
 version = "1.0.5"
@@ -59,6 +126,12 @@ version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
+[[package]]
+name = "libc"
+version = "0.2.147"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
+
 [[package]]
 name = "log"
 version = "0.4.17"
@@ -74,12 +147,40 @@ version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
 
+[[package]]
+name = "miniz_oxide"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "object"
+version = "0.31.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "once_cell"
 version = "1.17.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
 
+[[package]]
+name = "pretty_assertions"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
+dependencies = [
+ "diff",
+ "yansi",
+]
+
 [[package]]
 name = "proc-macro2"
 version = "1.0.64"
@@ -115,6 +216,12 @@ version = "0.6.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
 
+[[package]]
+name = "rustc-demangle"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
+
 [[package]]
 name = "ryu"
 version = "1.0.12"
@@ -186,6 +293,26 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "thiserror"
+version = "1.0.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.26",
+]
+
 [[package]]
 name = "unicode-ident"
 version = "1.0.6"
@@ -250,11 +377,32 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
 name = "wasm-lib"
 version = "0.1.0"
 dependencies = [
+ "anyhow",
+ "backtrace",
  "bincode",
+ "gloo-utils",
  "lazy_static",
+ "pretty_assertions",
  "regex",
  "serde",
  "serde-wasm-bindgen",
  "serde_json",
+ "thiserror",
  "wasm-bindgen",
 ]
+
+[[package]]
+name = "web-sys"
+version = "0.3.64"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
@ -8,10 +8,21 @@ edition = "2021"
|
|||||||
crate-type = ["cdylib"]
|
crate-type = ["cdylib"]
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
anyhow = "1.0.75"
|
||||||
|
backtrace = "0.3"
|
||||||
bincode = "1.3.3"
|
bincode = "1.3.3"
|
||||||
|
gloo-utils = "0.2.0"
|
||||||
lazy_static = "1.4.0"
|
lazy_static = "1.4.0"
|
||||||
regex = "1.7.1"
|
regex = "1.7.1"
|
||||||
serde = {version = "1.0.152", features = ["derive"] }
|
serde = {version = "1.0.152", features = ["derive"] }
|
||||||
serde-wasm-bindgen = "0.3.1"
|
serde-wasm-bindgen = "0.3.0"
|
||||||
serde_json = "1.0.93"
|
serde_json = "1.0.93"
|
||||||
wasm-bindgen = "0.2.78"
|
thiserror = "1.0.47"
|
||||||
|
wasm-bindgen = "0.2.87"
|
||||||
|
|
||||||
|
[profile.release]
|
||||||
|
panic = "abort"
|
||||||
|
debug = true
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
pretty_assertions = "1.4.0"
|
||||||
|
@@ -3,7 +3,7 @@ use std::collections::HashMap;
 
 use serde::{Deserialize, Serialize};
 
-#[derive(Debug, Deserialize, Serialize)]
+#[derive(Debug, Deserialize, Serialize, PartialEq)]
 #[serde(rename_all = "camelCase")]
 pub struct Program {
     pub start: usize,
@@ -12,7 +12,7 @@ pub struct Program {
     pub non_code_meta: NoneCodeMeta,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 #[serde(tag = "type")]
 pub enum BodyItem {
     ExpressionStatement(ExpressionStatement),
@@ -20,7 +20,7 @@ pub enum BodyItem {
     ReturnStatement(ReturnStatement),
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 #[serde(tag = "type")]
 pub enum Value {
     Literal(Box<Literal>),
@@ -36,7 +36,7 @@ pub enum Value {
     UnaryExpression(Box<UnaryExpression>),
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 #[serde(tag = "type")]
 pub enum BinaryPart {
     Literal(Box<Literal>),
@@ -46,14 +46,15 @@ pub enum BinaryPart {
     UnaryExpression(Box<UnaryExpression>),
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[serde(tag = "type")]
 pub struct NoneCodeNode {
     pub start: usize,
     pub end: usize,
     pub value: String,
 }
 
-#[derive(Debug, Clone, Serialize)]
+#[derive(Debug, Clone, Serialize, PartialEq)]
 #[serde(rename_all = "camelCase")]
 pub struct NoneCodeMeta {
     pub none_code_nodes: HashMap<usize, NoneCodeNode>,
@@ -86,14 +87,14 @@ impl<'de> Deserialize<'de> for NoneCodeMeta {
     }
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct ExpressionStatement {
     pub start: usize,
     pub end: usize,
     pub expression: Value,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct CallExpression {
     pub start: usize,
     pub end: usize,
@@ -102,7 +103,7 @@ pub struct CallExpression {
     pub optional: bool,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct VariableDeclaration {
     pub start: usize,
     pub end: usize,
@@ -110,7 +111,8 @@ pub struct VariableDeclaration {
     pub kind: String, // Change to enum if there are specific values
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[serde(tag = "type")]
 pub struct VariableDeclarator {
     pub start: usize,
     pub end: usize,
@@ -118,7 +120,7 @@ pub struct VariableDeclarator {
     pub init: Value,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct Literal {
     pub start: usize,
     pub end: usize,
@@ -126,34 +128,36 @@ pub struct Literal {
     pub raw: String,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[serde(tag = "type")]
 pub struct Identifier {
     pub start: usize,
     pub end: usize,
     pub name: String,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct PipeSubstitution {
     pub start: usize,
     pub end: usize,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct ArrayExpression {
     pub start: usize,
     pub end: usize,
     pub elements: Vec<Value>,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct ObjectExpression {
     pub start: usize,
     pub end: usize,
     pub properties: Vec<ObjectProperty>,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[serde(tag = "type")]
 pub struct ObjectProperty {
     pub start: usize,
     pub end: usize,
@@ -161,37 +165,37 @@ pub struct ObjectProperty {
     pub value: Value,
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 #[serde(tag = "type")]
 pub enum MemberObject {
     MemberExpression(Box<MemberExpression>),
     Identifier(Box<Identifier>),
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 #[serde(tag = "type")]
-pub enum MemberProperty {
+pub enum LiteralIdentifier {
     Identifier(Box<Identifier>),
     Literal(Box<Literal>),
 }
 
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct MemberExpression {
     pub start: usize,
     pub end: usize,
     pub object: MemberObject,
-    pub property: MemberProperty,
+    pub property: LiteralIdentifier,
     pub computed: bool,
 }
 
-#[derive(Debug)]
-pub struct ObjectKeyInfo {
-    pub key: Box<dyn std::any::Any>,
-    pub index: usize,
-    pub computed: bool,
-}
-
 #[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct ObjectKeyInfo {
+    pub key: LiteralIdentifier,
+    pub index: usize,
+    pub computed: bool,
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
 pub struct BinaryExpression {
     pub start: usize,
     pub end: usize,
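The `#[serde(tag = "type")]` attributes above keep the Rust structs and enums wire-compatible with the TypeScript AST types, which discriminate on a `type` field. A purely illustrative sketch of the JSON a `MemberExpression` like `foo.bar` would carry under these attributes (positions made up for the example):

// Illustrative payload only; field layout follows the serde attributes above.
const memberExpressionJson = {
  type: 'MemberExpression',
  start: 0,
  end: 7,
  object: { type: 'Identifier', start: 0, end: 3, name: 'foo' },
  property: { type: 'Identifier', start: 4, end: 7, name: 'bar' },
  computed: false,
}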
@ -200,7 +204,7 @@ pub struct BinaryExpression {
|
|||||||
pub right: BinaryPart,
|
pub right: BinaryPart,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||||
pub struct UnaryExpression {
|
pub struct UnaryExpression {
|
||||||
pub start: usize,
|
pub start: usize,
|
||||||
pub end: usize,
|
pub end: usize,
|
||||||
@ -208,7 +212,7 @@ pub struct UnaryExpression {
|
|||||||
pub argument: BinaryPart,
|
pub argument: BinaryPart,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
pub struct PipeExpression {
|
pub struct PipeExpression {
|
||||||
pub start: usize,
|
pub start: usize,
|
||||||
@ -217,7 +221,8 @@ pub struct PipeExpression {
|
|||||||
pub non_code_meta: NoneCodeMeta,
|
pub non_code_meta: NoneCodeMeta,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||||
|
|
||||||
pub struct FunctionExpression {
|
pub struct FunctionExpression {
|
||||||
pub start: usize,
|
pub start: usize,
|
||||||
pub end: usize,
|
pub end: usize,
|
||||||
@ -226,8 +231,8 @@ pub struct FunctionExpression {
|
|||||||
pub body: BlockStatement,
|
pub body: BlockStatement,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase", tag = "type")]
|
||||||
pub struct BlockStatement {
|
pub struct BlockStatement {
|
||||||
pub start: usize,
|
pub start: usize,
|
||||||
pub end: usize,
|
pub end: usize,
|
||||||
@ -235,7 +240,7 @@ pub struct BlockStatement {
|
|||||||
pub non_code_meta: NoneCodeMeta,
|
pub non_code_meta: NoneCodeMeta,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
|
||||||
pub struct ReturnStatement {
|
pub struct ReturnStatement {
|
||||||
pub start: usize,
|
pub start: usize,
|
||||||
pub end: usize,
|
pub end: usize,
|
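Two themes run through these type changes: every AST node now derives PartialEq so the ported tests can compare whole trees directly, and the serde tag attributes keep the JSON wire format identical to the TypeScript AST being replaced. A minimal round-trip sketch of the tagging behaviour; the Identifier layout is assumed from its use elsewhere in this file, and the enum is pared down to one variant:

// Hedged sketch: `Identifier { start, end, name }` is assumed, and the enum is
// trimmed to a single variant for brevity. Requires serde + serde_json.
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
pub struct Identifier {
    pub start: usize,
    pub end: usize,
    pub name: String,
}

#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
#[serde(tag = "type")]
pub enum LiteralIdentifier {
    Identifier(Box<Identifier>),
}

fn main() {
    let prop = LiteralIdentifier::Identifier(Box::new(Identifier {
        start: 0,
        end: 4,
        name: "prop".to_owned(),
    }));
    let json = serde_json::to_string(&prop).unwrap();
    // `tag = "type"` folds the variant name into the object, matching the JS AST:
    assert_eq!(json, r#"{"type":"Identifier","start":0,"end":4,"name":"prop"}"#);
    // PartialEq is what lets the ported tests assert on whole nodes.
    let back: LiteralIdentifier = serde_json::from_str(&json).unwrap();
    assert_eq!(prop, back);
}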
35
src/wasm-lib/src/errors.rs
Normal file
@@ -0,0 +1,35 @@
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+#[derive(Error, Debug, Serialize, Deserialize)]
+#[serde(tag = "kind", rename_all = "snake_case")]
+pub enum KclError {
+    #[error("syntax: {0:?}")]
+    Syntax(KclErrorDetails),
+    #[error("semantic: {0:?}")]
+    Semantic(KclErrorDetails),
+    #[error("type: {0:?}")]
+    Type(KclErrorDetails),
+    #[error("unimplemented: {0:?}")]
+    Unimplemented(KclErrorDetails),
+    #[error("value already defined: {0:?}")]
+    ValueAlreadyDefined(KclErrorDetails),
+    #[error("undefined value: {0:?}")]
+    UndefinedValue(KclErrorDetails),
+    #[error("invalid expression: {0:?}")]
+    InvalidExpression(crate::math_parser::MathExpression),
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct KclErrorDetails {
+    #[serde(rename = "sourceRanges")]
+    pub source_ranges: Vec<[i32; 2]>,
+    #[serde(rename = "msg")]
+    pub message: String,
+}
+
+impl From<KclError> for String {
+    fn from(error: KclError) -> Self {
+        serde_json::to_string(&error).unwrap()
+    }
+}
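Because KclError is internally tagged with "kind" and KclErrorDetails renames its fields, each variant serializes to a flat object the JavaScript side can switch on. A hypothetical test against the types above, showing the shape that follows from those serde attributes:

// Hypothetical test placed alongside the types above; the JSON shape follows
// from the serde attributes (internal tag first, then renamed fields in order).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn kcl_error_serializes_flat() {
        let err = KclError::Syntax(KclErrorDetails {
            source_ranges: vec![[0, 4]],
            message: "unexpected token".to_owned(),
        });
        // Uses the From<KclError> for String impl above.
        let json = String::from(err);
        assert_eq!(
            json,
            r#"{"kind":"syntax","sourceRanges":[[0,4]],"msg":"unexpected token"}"#
        );
    }
}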
src/wasm-lib/src/lib.rs
@@ -1,4 +1,7 @@
-mod abstract_syntax_tree;
+mod abstract_syntax_tree_types;
+mod errors;
 mod export;
+mod math_parser;
+mod parser;
 mod recast;
 mod tokeniser;
1024
src/wasm-lib/src/math_parser.rs
Normal file
File diff suppressed because it is too large.
2785
src/wasm-lib/src/parser.rs
Normal file
File diff suppressed because it is too large.
src/wasm-lib/src/recast.rs
@@ -2,9 +2,9 @@
 //! The inverse of parsing (which generates an AST from the source code)
 use wasm_bindgen::prelude::*;
 
-use crate::abstract_syntax_tree::{
+use crate::abstract_syntax_tree_types::{
     ArrayExpression, BinaryExpression, BinaryPart, BodyItem, CallExpression, FunctionExpression,
-    Literal, MemberExpression, MemberObject, MemberProperty, ObjectExpression, PipeExpression,
+    Literal, LiteralIdentifier, MemberExpression, MemberObject, ObjectExpression, PipeExpression,
     Program, UnaryExpression, Value,
 };
@@ -212,14 +212,14 @@ fn recast_argument(argument: Value, indentation: String, is_in_pipe_expression:
 
 fn recast_member_expression(expression: MemberExpression) -> String {
     let key_str = match expression.property {
-        MemberProperty::Identifier(identifier) => {
+        LiteralIdentifier::Identifier(identifier) => {
             if expression.computed {
                 format!("[{}]", &(*identifier.name))
             } else {
                 format!(".{}", &(*identifier.name))
             }
         }
-        MemberProperty::Literal(lit) => format!("[{}]", &(*lit.raw)),
+        LiteralIdentifier::Literal(lit) => format!("[{}]", &(*lit.raw)),
     };
 
     match expression.object {
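So after the rename, property printing is unchanged: identifier properties recast as `.name` (or `[name]` when `computed` is set), and literal properties always recast as brackets around the literal's raw text. A hypothetical check of the identifier path; the Identifier layout and the final object formatting are assumed, since that code sits outside this hunk:

// Hypothetical test: Identifier { start, end, name } and the "obj" + key_str
// concatenation in the object match arm are assumed, not shown in this hunk.
#[test]
fn recasts_identifier_property() {
    let expr = MemberExpression {
        start: 0,
        end: 8,
        object: MemberObject::Identifier(Box::new(Identifier {
            start: 0,
            end: 3,
            name: "obj".to_owned(),
        })),
        property: LiteralIdentifier::Identifier(Box::new(Identifier {
            start: 4,
            end: 8,
            name: "prop".to_owned(),
        })),
        computed: false,
    };
    assert_eq!(recast_member_expression(expr), "obj.prop");
}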
@@ -414,16 +414,10 @@ extern "C" {
 // wasm_bindgen wrapper for recast
 // test for this function and by extension the recaster are done in javascript land src/lang/recast.test.ts
 #[wasm_bindgen]
-pub fn recast_js(json_str: &str) -> String {
+pub fn recast_js(json_str: &str) -> Result<JsValue, JsError> {
     // deserialize the ast from a stringified json
-    let result: Result<Program, _> = serde_json::from_str(json_str);
-    let ast = match result {
-        Ok(ast) => ast,
-        Err(e) => {
-            log(e.to_string().as_str());
-            panic!("error: {}", e)
-        }
-    };
-
-    recast(ast, "".to_string(), false)
+    let program: Program = serde_json::from_str(json_str).map_err(JsError::from)?;
+    let result = recast(program, "".to_string(), false);
+    Ok(serde_wasm_bindgen::to_value(&result)?)
 }
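This is the recurring pattern of the port: replace log-and-panic (which aborts the WASM instance) with Result<JsValue, JsError>, which wasm-bindgen surfaces as a catchable exception on the JavaScript side. The same pattern in isolation, as an illustrative function that is not part of this PR:

// Illustrative only: mirrors the recast_js error-propagation pattern.
use wasm_bindgen::prelude::*;

#[wasm_bindgen]
pub fn double_number(json_str: &str) -> Result<JsValue, JsError> {
    // A malformed input becomes a JsError (a thrown JS exception), not a panic.
    let n: f64 = serde_json::from_str(json_str).map_err(JsError::from)?;
    // serde_wasm_bindgen::Error also converts into JsError through `?`.
    Ok(serde_wasm_bindgen::to_value(&(n * 2.0))?)
}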
src/wasm-lib/src/tokeniser.rs
@@ -262,7 +262,7 @@ fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
     None
 }
 
-fn lexer(str: &str) -> Vec<Token> {
+pub fn lexer(str: &str) -> Vec<Token> {
     fn recursively_tokenise(
         str: &str,
         current_index: usize,
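Making lexer public is what lets the new parser module (whose diff is suppressed above) consume tokens directly instead of round-tripping through JSON. A sketch of that assumed call pattern:

// Assumed usage from the new parser module; parser.rs itself is suppressed above.
use crate::tokeniser::lexer;

pub fn parse_source(source: &str) {
    // Tokenise first, then hand the tokens to the parser proper.
    let tokens = lexer(source);
    let _ = tokens; // ... recursive-descent parsing elided ...
}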
@@ -288,14 +288,13 @@ fn lexer(str: &str) -> Vec<Token> {
 #[wasm_bindgen]
 pub fn lexer_js(str: &str) -> Result<JsValue, JsError> {
     let tokens = lexer(str);
-    serde_json::to_string(&tokens)
-        .map_err(JsError::from)
-        .map(|s| JsValue::from_str(&s))
+    Ok(serde_wasm_bindgen::to_value(&tokens)?)
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
+    use pretty_assertions::assert_eq;
 
     #[test]
     fn is_number_test() {
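pretty_assertions::assert_eq is a drop-in replacement for the std macro that prints a line-by-line colored diff on failure, which pays off when comparing long Vec<Token> values. A small illustration with hypothetical values:

// Hypothetical illustration: shadowing std's assert_eq! with pretty_assertions.
#[cfg(test)]
mod example {
    use pretty_assertions::assert_eq;

    #[test]
    fn token_text_matches() {
        let expected = vec!["const", "a", "=", "5"];
        let actual = vec!["const", "a", "=", "5"];
        // On mismatch this prints a colored, line-by-line diff of both vectors.
        assert_eq!(expected, actual);
    }
}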