diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 34b66ebd3..17bbb7740 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -11,7 +11,10 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:
- node-version: '18.x'
+ node-version: '16.x'
- run: yarn install
+ - run: yarn build:wasm:ci
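+ # the jest tests fetch wasm_lib_bg.wasm over http, so serve ./public first (see simpleserver:ci in package.json)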
+ - run: yarn simpleserver:ci
- run: yarn test:nowatch
- run: yarn test:cov
+ - run: yarn test:rust
diff --git a/.gitignore b/.gitignore
index 4d29575de..01e21a7c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,7 @@
npm-debug.log*
yarn-debug.log*
yarn-error.log*
+
+# rust
+src/wasm-lib/target
+public/wasm_lib_bg.wasm
diff --git a/README.md b/README.md
index 11e8f59b7..a8ef57024 100644
--- a/README.md
+++ b/README.md
@@ -12,14 +12,36 @@ Originally Presented on 10/01/2023
[demo-slides.pdf](https://github.com/KittyCAD/Eng/files/10398178/demo.pdf)
-## To run, it's the usual
+## To run, there are a couple of steps since we're compiling Rust to WASM; you'll need Rust installed, then
```
yarn install
+```
+then
+```
+yarn build:wasm
+```
+That will build the WASM binary and put it in the `public` dir (though it's gitignored)
+
+finally
+```
yarn start
```
-and `yarn test` for . . . tests
+To run `yarn test`, you need to have built the WASM first. The tests download the binary from a server, so if you've already got `yarn start` running that will work; otherwise run
+```
+yarn simpleserver
+```
+in one terminal
+and
+```
+yarn test
+```
+in another.
+
+If you want to edit the Rust files, you can cd into `src/wasm-lib` and use the usual Cargo commands (`cargo build`, `cargo test`). When you want to bring the changes back into the web app, a fresh `yarn build:wasm` from the root is needed.
+
+Worth noting that the WASM integration in this project is pretty hacky, because it pushes create-react-app further than is practical, but the focus is on features at the moment rather than the setup.
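+
+For reference, here's a minimal sketch (not part of the app) of how the web-app side consumes the WASM lexer via `initPromise`/`asyncLexer` from `src/lang/rust.ts` and `src/lang/tokeniser.ts`:
+
+```
+// Sketch only: assumes wasm_lib_bg.wasm is being served
+// (either `yarn start` or `yarn simpleserver` on port 3000).
+import { asyncLexer } from './src/lang/tokeniser'
+
+async function printTokens(code: string) {
+  // asyncLexer awaits initPromise internally before calling the
+  // wasm-bindgen-generated lexer_js, then parses its JSON output
+  const tokens = await asyncLexer(code)
+  console.log(tokens.map((t) => `${t.type} ${t.value}`))
+}
+
+printTokens('const a = 5')
+```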
diff --git a/package.json b/package.json
index 771affb91..fafb8ffb9 100644
--- a/package.json
+++ b/package.json
@@ -15,23 +15,35 @@
"@types/react-dom": "^18.0.0",
"@uiw/react-codemirror": "^4.15.1",
"allotment": "^1.17.0",
+ "http-server": "^14.1.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-json-view": "^1.21.3",
"react-scripts": "5.0.1",
"three": "^0.146.0",
"typescript": "^4.4.2",
+ "util": "^0.12.5",
+ "wasm-pack": "^0.10.3",
"web-vitals": "^2.1.0",
"zustand": "^4.1.4"
},
"scripts": {
"start": "react-scripts start",
- "build": "react-scripts build",
+ "build": "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y && source \"$HOME/.cargo/env\" && curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -y && yarn build:wasm:ci && react-scripts build",
+ "build:local": "react-scripts build",
"test": "react-scripts test",
"test:nowatch": "react-scripts test --watchAll=false",
+ "test:rust": "(cd src/wasm-lib && cargo test && cargo clippy)",
"test:cov": "react-scripts test --watchAll=false --coverage=true",
+ "simpleserver:ci": "http-server ./public --cors -p 3000 &",
+ "simpleserver": "http-server ./public --cors -p 3000",
"eject": "react-scripts eject",
- "fmt": "prettier --write ./src/**.{ts,tsx} && prettier --write ./src/**/*.{ts,tsx} && prettier --write ./src/lang/**/*.{ts,tsx}"
+ "fmt": "prettier --write ./src/**.{ts,tsx} && prettier --write ./src/**/*.{ts,tsx} && prettier --write ./src/lang/**/*.{ts,tsx} && prettier --write ./src/wasm-lib/**/*.{js,ts}",
+ "remove-importmeta": "sed -i '' 's/import.meta.url//g' \"./src/wasm-lib/pkg/wasm_lib.js\"",
+ "remove-importmeta:ci": "sed -i 's/import.meta.url//g' \"./src/wasm-lib/pkg/wasm_lib.js\"",
+ "add-missing-import": "echo \"import util from 'util'; if (typeof window !== 'undefined' && !window.TextEncoder) { window.TextEncoder = util.TextEncoder; window.TextDecoder = util.TextDecoder}\" | cat - ./src/wasm-lib/pkg/wasm_lib.js > temp && mv temp ./src/wasm-lib/pkg/wasm_lib.js",
+ "build:wasm:ci": "mkdir src/wasm-lib/pkg; cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cd ../../ && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn remove-importmeta:ci && yarn add-missing-import && yarn fmt",
+ "build:wasm": "mkdir src/wasm-lib/pkg; cd src/wasm-lib && wasm-pack build --target web --out-dir pkg && cd ../../ && cp src/wasm-lib/pkg/wasm_lib_bg.wasm public && yarn remove-importmeta && yarn add-missing-import && yarn fmt"
},
"jest": {
"transformIgnorePatterns": [
diff --git a/src/components/MemoryPanel.test.tsx b/src/components/MemoryPanel.test.tsx
index 1421b8a51..ead40656f 100644
--- a/src/components/MemoryPanel.test.tsx
+++ b/src/components/MemoryPanel.test.tsx
@@ -2,42 +2,44 @@ import { processMemory } from './MemoryPanel'
import { lexer } from '../lang/tokeniser'
import { abstractSyntaxTree } from '../lang/abstractSyntaxTree'
import { executor } from '../lang/executor'
+import { initPromise } from '../lang/rust'
+
+beforeAll(() => initPromise)
describe('processMemory', () => {
- const code = `
-const myVar = 5
-const myFn = (a) => {
- return a - 2
-}
-const otherVar = myFn(5)
-
-const theExtrude = startSketchAt([0, 0])
- |> lineTo([-2.4, myVar], %)
- |> lineTo([-0.76, otherVar], %)
- |> extrude(4, %)
-
-const theSketch = startSketchAt([0, 0])
- |> lineTo([-3.35, 0.17], %)
- |> lineTo([0.98, 5.16], %)
- |> lineTo([2.15, 4.32], %)
- |> rx(90, %)
-show(theExtrude, theSketch)`
- const tokens = lexer(code)
- const ast = abstractSyntaxTree(tokens)
- const programMemory = executor(ast, {
- root: {
- log: {
- type: 'userVal',
- value: (a: any) => {
- console.log('raw log', a)
- },
- __meta: [],
- },
- },
- _sketch: [],
- })
-
it('should grab the values and remove and geo data', () => {
+ const code = `
+ const myVar = 5
+ const myFn = (a) => {
+ return a - 2
+ }
+ const otherVar = myFn(5)
+
+ const theExtrude = startSketchAt([0, 0])
+ |> lineTo([-2.4, myVar], %)
+ |> lineTo([-0.76, otherVar], %)
+ |> extrude(4, %)
+
+ const theSketch = startSketchAt([0, 0])
+ |> lineTo([-3.35, 0.17], %)
+ |> lineTo([0.98, 5.16], %)
+ |> lineTo([2.15, 4.32], %)
+ |> rx(90, %)
+ show(theExtrude, theSketch)`
+ const tokens = lexer(code)
+ const ast = abstractSyntaxTree(tokens)
+ const programMemory = executor(ast, {
+ root: {
+ log: {
+ type: 'userVal',
+ value: (a: any) => {
+ console.log('raw log', a)
+ },
+ __meta: [],
+ },
+ },
+ _sketch: [],
+ })
const output = processMemory(programMemory)
expect(output.myVar).toEqual(5)
expect(output.myFn).toEqual('__function__')
diff --git a/src/components/RenderViewerArtifacts.tsx b/src/components/RenderViewerArtifacts.tsx
index 5313e31b5..d1dcb7f06 100644
--- a/src/components/RenderViewerArtifacts.tsx
+++ b/src/components/RenderViewerArtifacts.tsx
@@ -79,7 +79,7 @@ function MovingSphere({
const handleMouseUp = () => {
if (isMouseDown && ast) {
const thePath = getNodePathFromSourceRange(ast, sourceRange)
- const yo = point2DRef.current.clone()
+ const current2d = point2DRef.current.clone()
const inverseQuaternion = new Quaternion()
if (
guiMode.mode === 'canEditSketch' ||
@@ -88,8 +88,8 @@ function MovingSphere({
inverseQuaternion.set(...guiMode.rotation)
inverseQuaternion.invert()
}
- yo.sub(new Vector3(...position).applyQuaternion(inverseQuaternion))
- let [x, y] = [roundOff(yo.x, 2), roundOff(yo.y, 2)]
+ current2d.sub(new Vector3(...position).applyQuaternion(inverseQuaternion))
+ let [x, y] = [roundOff(current2d.x, 2), roundOff(current2d.y, 2)]
let theNewPoints: [number, number] = [x, y]
const { modifiedAst } = changeSketchArguments(
ast,
diff --git a/src/lang/abstractSyntaxTree.test.ts b/src/lang/abstractSyntaxTree.test.ts
index 547b179c7..30e2ebfe5 100644
--- a/src/lang/abstractSyntaxTree.test.ts
+++ b/src/lang/abstractSyntaxTree.test.ts
@@ -5,6 +5,9 @@ import {
findEndOfBinaryExpression,
} from './abstractSyntaxTree'
import { lexer } from './tokeniser'
+import { initPromise } from './rust'
+
+beforeAll(() => initPromise)
describe('findClosingBrace', () => {
test('finds the closing brace', () => {
diff --git a/src/lang/artifact.test.ts b/src/lang/artifact.test.ts
index 9cfa347aa..d733c40da 100644
--- a/src/lang/artifact.test.ts
+++ b/src/lang/artifact.test.ts
@@ -1,6 +1,9 @@
import { abstractSyntaxTree } from './abstractSyntaxTree'
import { lexer } from './tokeniser'
import { executor, SketchGroup, ExtrudeGroup } from './executor'
+import { initPromise } from './rust'
+
+beforeAll(() => initPromise)
describe('testing artifacts', () => {
test('sketch artifacts', () => {
diff --git a/src/lang/astMathExpressions.test.ts b/src/lang/astMathExpressions.test.ts
index 5c3ab5b18..05565125a 100644
--- a/src/lang/astMathExpressions.test.ts
+++ b/src/lang/astMathExpressions.test.ts
@@ -1,5 +1,8 @@
import { parseExpression, reversePolishNotation } from './astMathExpressions'
import { lexer } from './tokeniser'
+import { initPromise } from './rust'
+
+beforeAll(() => initPromise)
describe('parseExpression', () => {
it('parses a simple expression', () => {
diff --git a/src/lang/executor.test.ts b/src/lang/executor.test.ts
index 34537e4ca..6206082f2 100644
--- a/src/lang/executor.test.ts
+++ b/src/lang/executor.test.ts
@@ -3,6 +3,9 @@ import fs from 'node:fs'
import { abstractSyntaxTree } from './abstractSyntaxTree'
import { lexer } from './tokeniser'
import { executor, ProgramMemory, Path, SketchGroup } from './executor'
+import { initPromise } from './rust'
+
+beforeAll(() => initPromise)
describe('test', () => {
it('test assigning two variables, the second summing with the first', () => {
diff --git a/src/lang/getNodePathFromSourceRange.test.ts b/src/lang/getNodePathFromSourceRange.test.ts
index 0bf4dcda3..d6e8cf2cc 100644
--- a/src/lang/getNodePathFromSourceRange.test.ts
+++ b/src/lang/getNodePathFromSourceRange.test.ts
@@ -1,6 +1,9 @@
import { getNodePathFromSourceRange } from './abstractSyntaxTree'
import { lexer } from './tokeniser'
import { abstractSyntaxTree, getNodeFromPath } from './abstractSyntaxTree'
+import { initPromise } from './rust'
+
+beforeAll(() => initPromise)
describe('testing getNodePathFromSourceRange', () => {
it('test it gets the right path for a `lineTo` CallExpression within a SketchExpression', () => {
diff --git a/src/lang/recast.test.ts b/src/lang/recast.test.ts
index c75623499..d02830586 100644
--- a/src/lang/recast.test.ts
+++ b/src/lang/recast.test.ts
@@ -2,6 +2,9 @@ import { recast } from './recast'
import { Program, abstractSyntaxTree } from './abstractSyntaxTree'
import { lexer, Token } from './tokeniser'
import fs from 'node:fs'
+import { initPromise } from './rust'
+
+beforeAll(() => initPromise)
describe('recast', () => {
it('recasts a simple program', () => {
diff --git a/src/lang/rust.ts b/src/lang/rust.ts
new file mode 100644
index 000000000..2bee5c8c9
--- /dev/null
+++ b/src/lang/rust.ts
@@ -0,0 +1,10 @@
+import init from '../wasm-lib/pkg/wasm_lib'
+
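+// With no window (jest) or on localhost, fetch the binary from 127.0.0.1:3000
+// (served by yarn start or yarn simpleserver); otherwise fetch it from the deployed app's own origin.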
+const url =
+ typeof window === 'undefined'
+ ? 'http://127.0.0.1:3000'
+ : window.location.origin.includes('localhost')
+ ? 'http://127.0.0.1:3000'
+ : window.location.origin
+const fullUrl = url + '/wasm_lib_bg.wasm'
+export const initPromise = init(fullUrl)
diff --git a/src/lang/std/sketch.test.ts b/src/lang/std/sketch.test.ts
index 80e6b9606..03ab90fe8 100644
--- a/src/lang/std/sketch.test.ts
+++ b/src/lang/std/sketch.test.ts
@@ -12,6 +12,9 @@ import {
} from '../abstractSyntaxTree'
import { recast } from '../recast'
import { executor } from '../executor'
+import { initPromise } from '../rust'
+
+beforeAll(() => initPromise)
const eachQuad: [number, [number, number]][] = [
[-315, [1, 1]],
@@ -159,32 +162,34 @@ show(mySketch001)`
})
describe('testing addTagForSketchOnFace', () => {
- const originalLine = 'lineTo([-1.59, -1.54], %)'
- const genCode = (line: string) => `
-const mySketch001 = startSketchAt([0, 0])
- |> rx(45, %)
- |> ${line}
- |> lineTo([0.46, -5.82], %)
-show(mySketch001)`
- const code = genCode(originalLine)
- const ast = abstractSyntaxTree(lexer(code))
- const programMemory = executor(ast)
- const sourceStart = code.indexOf(originalLine)
- const sourceRange: [number, number] = [
- sourceStart,
- sourceStart + originalLine.length,
- ]
- const pathToNode = getNodePathFromSourceRange(ast, sourceRange)
- const { modifiedAst } = addTagForSketchOnFace(
- {
- previousProgramMemory: programMemory,
- pathToNode,
- node: ast,
- },
- 'lineTo'
- )
- const expectedCode = genCode(
- "lineTo({ to: [-1.59, -1.54], tag: 'seg01' }, %)"
- )
- expect(recast(modifiedAst)).toBe(expectedCode)
+ it('needs to be in it', () => {
+ const originalLine = 'lineTo([-1.59, -1.54], %)'
+ const genCode = (line: string) => `
+ const mySketch001 = startSketchAt([0, 0])
+ |> rx(45, %)
+ |> ${line}
+ |> lineTo([0.46, -5.82], %)
+ show(mySketch001)`
+ const code = genCode(originalLine)
+ const ast = abstractSyntaxTree(lexer(code))
+ const programMemory = executor(ast)
+ const sourceStart = code.indexOf(originalLine)
+ const sourceRange: [number, number] = [
+ sourceStart,
+ sourceStart + originalLine.length,
+ ]
+ const pathToNode = getNodePathFromSourceRange(ast, sourceRange)
+ const { modifiedAst } = addTagForSketchOnFace(
+ {
+ previousProgramMemory: programMemory,
+ pathToNode,
+ node: ast,
+ },
+ 'lineTo'
+ )
+ const expectedCode = genCode(
+ "lineTo({ to: [-1.59, -1.54], tag: 'seg01' }, %)"
+ )
+ expect(recast(modifiedAst)).toBe(expectedCode)
+ })
})
diff --git a/src/lang/tokeniser.test.ts b/src/lang/tokeniser.test.ts
index 2c4158a6d..7c2a2fd65 100644
--- a/src/lang/tokeniser.test.ts
+++ b/src/lang/tokeniser.test.ts
@@ -1,174 +1,7 @@
-import {
- isBlockEnd,
- isBlockStart,
- isNumber,
- isOperator,
- isParanEnd,
- isParanStart,
- isString,
- isWhitespace,
- isWord,
- isComma,
- lexer,
- isLineComment,
- isBlockComment,
-} from './tokeniser'
+import { lexer } from './tokeniser'
+import { initPromise } from './rust'
-describe('testing helpers', () => {
- it('test is number', () => {
- expect(isNumber('1')).toBe(true)
- expect(isNumber('5?')).toBe(true)
- expect(isNumber('5 + 6')).toBe(true)
- expect(isNumber('5 + a')).toBe(true)
- expect(isNumber('-5')).toBe(true)
- expect(isNumber('5.5')).toBe(true)
- expect(isNumber('-5.5')).toBe(true)
-
- expect(isNumber('a')).toBe(false)
- expect(isNumber('?')).toBe(false)
- expect(isNumber('?5')).toBe(false)
- })
- it('test is whitespace', () => {
- expect(isWhitespace(' ')).toBe(true)
- expect(isWhitespace(' ')).toBe(true)
- expect(isWhitespace(' a')).toBe(true)
- expect(isWhitespace('a ')).toBe(true)
-
- expect(isWhitespace('a')).toBe(false)
- expect(isWhitespace('?')).toBe(false)
- })
- it('test is word', () => {
- expect(isWord('a')).toBe(true)
- expect(isWord('a ')).toBe(true)
- expect(isWord('a5')).toBe(true)
- expect(isWord('a5a')).toBe(true)
-
- expect(isWord('5')).toBe(false)
- expect(isWord('5a')).toBe(false)
- expect(isWord('5a5')).toBe(false)
- })
- it('test is string', () => {
- expect(isString('""')).toBe(true)
- expect(isString('"a"')).toBe(true)
- expect(isString('"a" ')).toBe(true)
- expect(isString('"a"5')).toBe(true)
- expect(isString("'a'5")).toBe(true)
- expect(isString('"with escaped \\" backslash"')).toBe(true)
-
- expect(isString('"')).toBe(false)
- expect(isString('"a')).toBe(false)
- expect(isString('a"')).toBe(false)
- expect(isString(' "a"')).toBe(false)
- expect(isString('5"a"')).toBe(false)
- })
- it('test is operator', () => {
- expect(isOperator('+')).toBe(true)
- expect(isOperator('+ ')).toBe(true)
- expect(isOperator('-')).toBe(true)
- expect(isOperator('<=')).toBe(true)
- expect(isOperator('<= ')).toBe(true)
- expect(isOperator('>=')).toBe(true)
- expect(isOperator('>= ')).toBe(true)
- expect(isOperator('> ')).toBe(true)
- expect(isOperator('< ')).toBe(true)
- expect(isOperator('| ')).toBe(true)
- expect(isOperator('|> ')).toBe(true)
- expect(isOperator('^ ')).toBe(true)
- expect(isOperator('% ')).toBe(true)
- expect(isOperator('+* ')).toBe(true)
-
- expect(isOperator('5 + 5')).toBe(false)
- expect(isOperator('a')).toBe(false)
- expect(isOperator('a+')).toBe(false)
- expect(isOperator('a+5')).toBe(false)
- expect(isOperator('5a+5')).toBe(false)
- expect(isOperator(', newVar')).toBe(false)
- expect(isOperator(',')).toBe(false)
- })
- it('test is paran start', () => {
- expect(isParanStart('(')).toBe(true)
- expect(isParanStart('( ')).toBe(true)
- expect(isParanStart('(5')).toBe(true)
- expect(isParanStart('(5 ')).toBe(true)
- expect(isParanStart('(5 + 5')).toBe(true)
- expect(isParanStart('(5 + 5)')).toBe(true)
- expect(isParanStart('(5 + 5) ')).toBe(true)
-
- expect(isParanStart('5')).toBe(false)
- expect(isParanStart('5 + 5')).toBe(false)
- expect(isParanStart('5( + 5)')).toBe(false)
- expect(isParanStart(' ( + 5)')).toBe(false)
- })
- it('test is paran end', () => {
- expect(isParanEnd(')')).toBe(true)
- expect(isParanEnd(') ')).toBe(true)
- expect(isParanEnd(')5')).toBe(true)
- expect(isParanEnd(')5 ')).toBe(true)
-
- expect(isParanEnd('5')).toBe(false)
- expect(isParanEnd('5 + 5')).toBe(false)
- expect(isParanEnd('5) + 5')).toBe(false)
- expect(isParanEnd(' ) + 5')).toBe(false)
- })
- it('test is block start', () => {
- expect(isBlockStart('{')).toBe(true)
- expect(isBlockStart('{ ')).toBe(true)
- expect(isBlockStart('{5')).toBe(true)
- expect(isBlockStart('{a')).toBe(true)
- expect(isBlockStart('{5 ')).toBe(true)
-
- expect(isBlockStart('5')).toBe(false)
- expect(isBlockStart('5 + 5')).toBe(false)
- expect(isBlockStart('5{ + 5')).toBe(false)
- expect(isBlockStart('a{ + 5')).toBe(false)
- expect(isBlockStart(' { + 5')).toBe(false)
- })
- it('test is block end', () => {
- expect(isBlockEnd('}')).toBe(true)
- expect(isBlockEnd('} ')).toBe(true)
- expect(isBlockEnd('}5')).toBe(true)
- expect(isBlockEnd('}5 ')).toBe(true)
-
- expect(isBlockEnd('5')).toBe(false)
- expect(isBlockEnd('5 + 5')).toBe(false)
- expect(isBlockEnd('5} + 5')).toBe(false)
- expect(isBlockEnd(' } + 5')).toBe(false)
- })
- it('test is comma', () => {
- expect(isComma(',')).toBe(true)
- expect(isComma(', ')).toBe(true)
- expect(isComma(',5')).toBe(true)
- expect(isComma(',5 ')).toBe(true)
-
- expect(isComma('5')).toBe(false)
- expect(isComma('5 + 5')).toBe(false)
- expect(isComma('5, + 5')).toBe(false)
- expect(isComma(' , + 5')).toBe(false)
- })
- it('test it matches line comments', () => {
- expect(isLineComment('//')).toBe(true)
- expect(isLineComment('// ')).toBe(true)
- expect(isLineComment('//5')).toBe(true)
- expect(isLineComment('//5 ')).toBe(true)
-
- expect(isLineComment('5')).toBe(false)
- expect(isLineComment('5 + 5')).toBe(false)
- expect(isLineComment('5// + 5')).toBe(false)
- expect(isLineComment(' // + 5')).toBe(false)
- })
- it('test it matches block comments', () => {
- expect(isBlockComment('/* */')).toBe(true)
- expect(isBlockComment('/**/')).toBe(true)
- expect(isBlockComment('/*5*/')).toBe(true)
- expect(isBlockComment('/*5 */')).toBe(true)
-
- expect(isBlockComment('/*')).toBe(false)
- expect(isBlockComment('5')).toBe(false)
- expect(isBlockComment('5 + 5')).toBe(false)
- expect(isBlockComment('5/* + 5')).toBe(false)
- expect(isBlockComment(' /* + 5')).toBe(false)
- })
-})
+beforeAll(() => initPromise)
describe('testing lexer', () => {
it('test lexer', () => {
diff --git a/src/lang/tokeniser.ts b/src/lang/tokeniser.ts
index 2684797eb..0294b7b1d 100644
--- a/src/lang/tokeniser.ts
+++ b/src/lang/tokeniser.ts
@@ -1,50 +1,5 @@
-// regular expression for number that includes a decimal point or starts with a minus sign
-const NUMBER = /^-?\d+(\.\d+)?/
-
-const WHITESPACE = /\s+/
-const WORD = /^[a-zA-Z_][a-zA-Z0-9_]*/
-// regex that captures everything between two non escaped quotes and the quotes aren't captured in the match
-const STRING = /^(["'])(?:(?=(\\?))\2.)*?\1/
-// verbose regex for finding operators, multiple character operators need to be first
-const OPERATOR = /^(>=|<=|==|=>|!= |\|>|\*|\+|-|\/|%|=|<|>|\||\^)/
-
-const BLOCK_START = /^\{/
-const BLOCK_END = /^\}/
-const PARAN_START = /^\(/
-const PARAN_END = /^\)/
-const ARRAY_START = /^\[/
-const ARRAY_END = /^\]/
-const COMMA = /^,/
-const COLON = /^:/
-const PERIOD = /^\./
-const LINECOMMENT = /^\/\/.*/
-const BLOCKCOMMENT = /^\/\*[\s\S]*?\*\//
-
-export const isNumber = (character: string) => NUMBER.test(character)
-export const isWhitespace = (character: string) => WHITESPACE.test(character)
-export const isWord = (character: string) => WORD.test(character)
-export const isString = (character: string) => STRING.test(character)
-export const isOperator = (character: string) => OPERATOR.test(character)
-export const isBlockStart = (character: string) => BLOCK_START.test(character)
-export const isBlockEnd = (character: string) => BLOCK_END.test(character)
-export const isParanStart = (character: string) => PARAN_START.test(character)
-export const isParanEnd = (character: string) => PARAN_END.test(character)
-export const isArrayStart = (character: string) => ARRAY_START.test(character)
-export const isArrayEnd = (character: string) => ARRAY_END.test(character)
-export const isComma = (character: string) => COMMA.test(character)
-export const isColon = (character: string) => COLON.test(character)
-export const isPeriod = (character: string) => PERIOD.test(character)
-export const isLineComment = (character: string) => LINECOMMENT.test(character)
-export const isBlockComment = (character: string) =>
- BLOCKCOMMENT.test(character)
-
-function matchFirst(str: string, regex: RegExp) {
- const theMatch = str.match(regex)
- if (!theMatch) {
- throw new Error('Should always be a match:' + str)
- }
- return theMatch[0]
-}
+import { lexer_js } from '../wasm-lib/pkg/wasm_lib'
+import { initPromise } from './rust'
export interface Token {
type:
@@ -64,89 +19,11 @@ export interface Token {
end: number
}
-const makeToken = (
- type: Token['type'],
- value: string,
- start: number
-): Token => ({
- type,
- value,
- start,
- end: start + value.length,
-})
-
-const returnTokenAtIndex = (str: string, startIndex: number): Token | null => {
- const strFromIndex = str.slice(startIndex)
- if (isString(strFromIndex)) {
- return makeToken('string', matchFirst(strFromIndex, STRING), startIndex)
- }
- const isLineCommentBool = isLineComment(strFromIndex)
- if (isLineCommentBool || isBlockComment(strFromIndex)) {
- return makeToken(
- isLineCommentBool ? 'linecomment' : 'blockcomment',
- matchFirst(strFromIndex, isLineCommentBool ? LINECOMMENT : BLOCKCOMMENT),
- startIndex
- )
- }
- if (isParanEnd(strFromIndex)) {
- return makeToken('brace', matchFirst(strFromIndex, PARAN_END), startIndex)
- }
- if (isParanStart(strFromIndex)) {
- return makeToken('brace', matchFirst(strFromIndex, PARAN_START), startIndex)
- }
- if (isBlockStart(strFromIndex)) {
- return makeToken('brace', matchFirst(strFromIndex, BLOCK_START), startIndex)
- }
- if (isBlockEnd(strFromIndex)) {
- return makeToken('brace', matchFirst(strFromIndex, BLOCK_END), startIndex)
- }
- if (isArrayStart(strFromIndex)) {
- return makeToken('brace', matchFirst(strFromIndex, ARRAY_START), startIndex)
- }
- if (isArrayEnd(strFromIndex)) {
- return makeToken('brace', matchFirst(strFromIndex, ARRAY_END), startIndex)
- }
- if (isComma(strFromIndex)) {
- return makeToken('comma', matchFirst(strFromIndex, COMMA), startIndex)
- }
- if (isNumber(strFromIndex)) {
- return makeToken('number', matchFirst(strFromIndex, NUMBER), startIndex)
- }
- if (isOperator(strFromIndex)) {
- return makeToken('operator', matchFirst(strFromIndex, OPERATOR), startIndex)
- }
- if (isWord(strFromIndex)) {
- return makeToken('word', matchFirst(strFromIndex, WORD), startIndex)
- }
- if (isColon(strFromIndex))
- return makeToken('colon', matchFirst(strFromIndex, COLON), startIndex)
- if (isPeriod(strFromIndex))
- return makeToken('period', matchFirst(strFromIndex, PERIOD), startIndex)
- if (isWhitespace(strFromIndex)) {
- return makeToken(
- 'whitespace',
- matchFirst(strFromIndex, WHITESPACE),
- startIndex
- )
- }
- return null
+export async function asyncLexer(str: string): Promise<Token[]> {
+ await initPromise
+ return JSON.parse(lexer_js(str)) as Token[]
}
-export const lexer = (str: string): Token[] => {
- const recursivelyTokenise = (
- str: string,
- currentIndex: number = 0,
- previousTokens: Token[] = []
- ): Token[] => {
- if (currentIndex >= str.length) {
- return previousTokens
- }
- const token = returnTokenAtIndex(str, currentIndex)
- if (!token) {
- return recursivelyTokenise(str, currentIndex + 1, previousTokens)
- }
- const nextIndex = currentIndex + token.value.length
- return recursivelyTokenise(str, nextIndex, [...previousTokens, token])
- }
- return recursivelyTokenise(str)
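+// Synchronous version: assumes the WASM module has already been initialised, i.e. initPromise
+// has resolved (the tests call beforeAll(() => initPromise) for exactly this reason).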
+export function lexer(str: string): Token[] {
+ return JSON.parse(lexer_js(str)) as Token[]
}
diff --git a/src/useStore.ts b/src/useStore.ts
index cdf0085e0..2d64b8f0d 100644
--- a/src/useStore.ts
+++ b/src/useStore.ts
@@ -7,7 +7,7 @@ import {
} from './lang/abstractSyntaxTree'
import { ProgramMemory, Position, PathToNode, Rotation } from './lang/executor'
import { recast } from './lang/recast'
-import { lexer } from './lang/tokeniser'
+import { asyncLexer } from './lang/tokeniser'
export type Range = [number, number]
export type TooTip =
@@ -155,9 +155,9 @@ export const useStore = create()((set, get) => ({
setAst: (ast) => {
set({ ast })
},
- updateAst: (ast, focusPath) => {
+ updateAst: async (ast, focusPath) => {
const newCode = recast(ast)
- const astWithUpdatedSource = abstractSyntaxTree(lexer(newCode))
+ const astWithUpdatedSource = abstractSyntaxTree(await asyncLexer(newCode))
set({ ast: astWithUpdatedSource, code: newCode })
if (focusPath) {
@@ -173,9 +173,9 @@ export const useStore = create()((set, get) => ({
setCode: (code) => {
set({ code })
},
- formatCode: () => {
+ formatCode: async () => {
const code = get().code
- const ast = abstractSyntaxTree(lexer(code))
+ const ast = abstractSyntaxTree(await asyncLexer(code))
const newCode = recast(ast)
set({ code: newCode, ast })
},
diff --git a/src/wasm-lib/Cargo.lock b/src/wasm-lib/Cargo.lock
new file mode 100644
index 000000000..951c455a5
--- /dev/null
+++ b/src/wasm-lib/Cargo.lock
@@ -0,0 +1,211 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "itoa"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "once_cell"
+version = "1.17.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "regex"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
+
+[[package]]
+name = "ryu"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
+
+[[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.107"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
+
+[[package]]
+name = "wasm-lib"
+version = "0.1.0"
+dependencies = [
+ "lazy_static",
+ "regex",
+ "serde",
+ "serde_json",
+ "wasm-bindgen",
+]
diff --git a/src/wasm-lib/Cargo.toml b/src/wasm-lib/Cargo.toml
new file mode 100644
index 000000000..71ff9bf29
--- /dev/null
+++ b/src/wasm-lib/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "wasm-lib"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+[lib]
+crate-type = ["cdylib"]
+
+[dependencies]
+lazy_static = "1.4.0"
+regex = "1.7.1"
+serde = {version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
+wasm-bindgen = "0.2.78"
diff --git a/src/wasm-lib/src/lib.rs b/src/wasm-lib/src/lib.rs
new file mode 100644
index 000000000..43cb6df4b
--- /dev/null
+++ b/src/wasm-lib/src/lib.rs
@@ -0,0 +1 @@
+mod tokeniser;
diff --git a/src/wasm-lib/src/tokeniser.rs b/src/wasm-lib/src/tokeniser.rs
new file mode 100644
index 000000000..3d1091196
--- /dev/null
+++ b/src/wasm-lib/src/tokeniser.rs
@@ -0,0 +1,602 @@
+extern crate lazy_static;
+extern crate regex;
+
+use wasm_bindgen::prelude::*;
+use lazy_static::lazy_static;
+use regex::Regex;
+use serde::{Deserialize, Serialize};
+
+#[wasm_bindgen]
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
+#[serde(rename_all = "lowercase")]
+pub enum TokenType {
+ Number,
+ Word,
+ Operator,
+ String,
+ Brace,
+ Whitespace,
+ Comma,
+ Colon,
+ Period,
+ LineComment,
+ BlockComment,
+}
+
+#[wasm_bindgen]
+#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, Clone)]
+pub struct Token {
+ #[serde(rename = "type")]
+ pub token_type: TokenType,
+ pub start: usize,
+ pub end: usize,
+ #[wasm_bindgen(skip)]
+ pub value: String,
+}
+#[wasm_bindgen]
+impl Token {
+ #[wasm_bindgen(constructor)]
+ pub fn new(token_type: TokenType, value: String, start: usize, end: usize) -> Token {
+ Token { token_type, value, start, end }
+ }
+
+ #[wasm_bindgen(getter)]
+ pub fn value(&self) -> String {
+ self.value.clone()
+ }
+
+ #[wasm_bindgen(setter)]
+ pub fn set_value(&mut self, value: String) {
+ self.value = value;
+ }
+}
+
+lazy_static! {
+ static ref NUMBER: Regex = Regex::new(r"^-?\d+(\.\d+)?").unwrap();
+ static ref WHITESPACE: Regex = Regex::new(r"\s+").unwrap();
+ static ref WORD: Regex = Regex::new(r"^[a-zA-Z_][a-zA-Z0-9_]*").unwrap();
+ static ref STRING: Regex = Regex::new(r#"^"([^"\\]|\\.)*"|'([^'\\]|\\.)*'"#).unwrap();
+ static ref OPERATOR: Regex = Regex::new(r"^(>=|<=|==|=>|!= |\|>|\*|\+|-|/|%|=|<|>|\||\^)").unwrap();
+ static ref BLOCK_START: Regex = Regex::new(r"^\{").unwrap();
+ static ref BLOCK_END: Regex = Regex::new(r"^\}").unwrap();
+ static ref PARAN_START: Regex = Regex::new(r"^\(").unwrap();
+ static ref PARAN_END: Regex = Regex::new(r"^\)").unwrap();
+ static ref ARRAY_START: Regex = Regex::new(r"^\[").unwrap();
+ static ref ARRAY_END: Regex = Regex::new(r"^\]").unwrap();
+ static ref COMMA: Regex = Regex::new(r"^,").unwrap();
+ static ref COLON: Regex = Regex::new(r"^:").unwrap();
+ static ref PERIOD: Regex = Regex::new(r"^\.").unwrap();
+ static ref LINECOMMENT: Regex = Regex::new(r"^//.*").unwrap();
+ static ref BLOCKCOMMENT: Regex = Regex::new(r"^/\*[\s\S]*?\*/").unwrap();
+}
+
+fn is_number(character: &str) -> bool {
+ NUMBER.is_match(character)
+}
+fn is_whitespace(character: &str) -> bool {
+ WHITESPACE.is_match(character)
+}
+fn is_word(character: &str) -> bool {
+ WORD.is_match(character)
+}
+fn is_string(character: &str) -> bool {
+ match STRING.find(character) {
+ Some(m) => m.start() == 0,
+ None => false,
+ }
+}
+fn is_operator(character: &str) -> bool {
+ OPERATOR.is_match(character)
+}
+fn is_block_start(character: &str) -> bool {
+ BLOCK_START.is_match(character)
+}
+fn is_block_end(character: &str) -> bool {
+ BLOCK_END.is_match(character)
+}
+fn is_paran_start(character: &str) -> bool {
+ PARAN_START.is_match(character)
+}
+fn is_paran_end(character: &str) -> bool {
+ PARAN_END.is_match(character)
+}
+fn is_array_start(character: &str) -> bool {
+ ARRAY_START.is_match(character)
+}
+fn is_array_end(character: &str) -> bool {
+ ARRAY_END.is_match(character)
+}
+fn is_comma(character: &str) -> bool {
+ COMMA.is_match(character)
+}
+fn is_colon(character: &str) -> bool {
+ COLON.is_match(character)
+}
+fn is_period(character: &str) -> bool {
+ PERIOD.is_match(character)
+}
+fn is_line_comment(character: &str) -> bool {
+ LINECOMMENT.is_match(character)
+}
+fn is_block_comment(character: &str) -> bool {
+ BLOCKCOMMENT.is_match(character)
+}
+
+fn match_first(str: &str, regex: &Regex) -> String {
+ let the_match = regex.find(str).unwrap();
+ let the_match_str = &str[the_match.start()..the_match.end()];
+ the_match_str.to_string()
+}
+
+fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
+ Token {
+ token_type,
+ value: value.to_string(),
+ start,
+ end: start + value.len(),
+ }
+}
+
+
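+// Note: the order of these checks matters, e.g. comments must be matched before operators
+// so "//" and "/*" aren't tokenised as "/" operators, and numbers before operators so "-5"
+// lexes as a single number token.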
+fn return_token_at_index(str: &str, start_index: usize) -> Option<Token> {
+ let str_from_index = &str[start_index..];
+ if is_string(str_from_index) {
+ return Some(make_token(
+ TokenType::String,
+ &match_first(str_from_index, &STRING),
+ start_index,
+ ));
+ }
+ let is_line_comment_bool = is_line_comment(str_from_index);
+ if is_line_comment_bool || is_block_comment(str_from_index) {
+ return Some(make_token(
+ if is_line_comment_bool {
+ TokenType::LineComment
+ } else {
+ TokenType::BlockComment
+ },
+ &match_first(
+ str_from_index,
+ if is_line_comment_bool {
+ &LINECOMMENT
+ } else {
+ &BLOCKCOMMENT
+ },
+ ),
+ start_index,
+ ));
+ }
+ if is_paran_end(str_from_index) {
+ return Some(make_token(
+ TokenType::Brace,
+ &match_first(str_from_index, &PARAN_END),
+ start_index,
+ ));
+ }
+ if is_paran_start(str_from_index) {
+ return Some(make_token(
+ TokenType::Brace,
+ &match_first(str_from_index, &PARAN_START),
+ start_index,
+ ));
+ }
+ if is_block_start(str_from_index) {
+ return Some(make_token(
+ TokenType::Brace,
+ &match_first(str_from_index, &BLOCK_START),
+ start_index,
+ ));
+ }
+ if is_block_end(str_from_index) {
+ return Some(make_token(
+ TokenType::Brace,
+ &match_first(str_from_index, &BLOCK_END),
+ start_index,
+ ));
+ }
+ if is_array_start(str_from_index) {
+ return Some(make_token(
+ TokenType::Brace,
+ &match_first(str_from_index, &ARRAY_START),
+ start_index,
+ ));
+ }
+ if is_array_end(str_from_index) {
+ return Some(make_token(
+ TokenType::Brace,
+ &match_first(str_from_index, &ARRAY_END),
+ start_index,
+ ));
+ }
+ if is_comma(str_from_index) {
+ return Some(make_token(
+ TokenType::Comma,
+ &match_first(str_from_index, &COMMA),
+ start_index,
+ ));
+ }
+ if is_number(str_from_index) {
+ return Some(make_token(
+ TokenType::Number,
+ &match_first(str_from_index, &NUMBER),
+ start_index,
+ ));
+ }
+ if is_operator(str_from_index) {
+ return Some(make_token(
+ TokenType::Operator,
+ &match_first(str_from_index, &OPERATOR),
+ start_index,
+ ));
+ }
+ if is_word(str_from_index) {
+ return Some(make_token(
+ TokenType::Word,
+ &match_first(str_from_index, &WORD),
+ start_index,
+ ));
+ }
+ if is_colon(str_from_index) {
+ return Some(make_token(
+ TokenType::Colon,
+ &match_first(str_from_index, &COLON),
+ start_index,
+ ));
+ }
+ if is_period(str_from_index) {
+ return Some(make_token(
+ TokenType::Period,
+ &match_first(str_from_index, &PERIOD),
+ start_index,
+ ));
+ }
+ if is_whitespace(str_from_index) {
+ return Some(make_token(
+ TokenType::Whitespace,
+ &match_first(str_from_index, &WHITESPACE),
+ start_index,
+ ));
+ }
+ None
+}
+
+fn lexer(str: &str) -> Vec<Token> {
+ fn recursively_tokenise(str: &str, current_index: usize, previous_tokens: Vec<Token>) -> Vec<Token> {
+ if current_index >= str.len() {
+ return previous_tokens;
+ }
+ let token = return_token_at_index(str, current_index);
+ if token.is_none() {
+ return recursively_tokenise(str, current_index + 1, previous_tokens)
+ }
+ let token = token.unwrap();
+ let mut new_tokens = previous_tokens;
+ let token_length = token.value.len();
+ new_tokens.push(token);
+ recursively_tokenise(str, current_index + token_length, new_tokens)
+ }
+ recursively_tokenise(str, 0, Vec::new())
+}
+
+// wasm_bindgen wrapper for lexer
+// tests for this function, and by extension lexer, live in javascript land: src/lang/tokeniser.test.ts
+#[wasm_bindgen]
+pub fn lexer_js(str: &str) -> JsValue {
+ let tokens = lexer(str);
+ JsValue::from_str(
+ &serde_json::to_string(&tokens)
+ .expect("failed to serialize lexer output"),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn is_number_test() {
+ assert_eq!(is_number("1"), true);
+ assert_eq!(is_number("1 abc"), true);
+ assert_eq!(is_number("1abc"), true);
+ assert_eq!(is_number("1.1"), true);
+ assert_eq!(is_number("1.1 abc"), true);
+ assert_eq!(is_number("a"), false);
+
+
+ assert_eq!(is_number("1"), true);
+ assert_eq!(is_number("5?"), true);
+ assert_eq!(is_number("5 + 6"), true);
+ assert_eq!(is_number("5 + a"), true);
+ assert_eq!(is_number("-5"), true);
+ assert_eq!(is_number("5.5"), true);
+ assert_eq!(is_number("-5.5"), true);
+
+ assert_eq!(is_number("a"), false);
+ assert_eq!(is_number("?"), false);
+ assert_eq!(is_number("?5"), false);
+
+ }
+
+ #[test]
+ fn is_whitespace_test() {
+ assert_eq!(is_whitespace(" "), true);
+ assert_eq!(is_whitespace(" "), true);
+ assert_eq!(is_whitespace(" a"), true);
+ assert_eq!(is_whitespace("a "), true);
+
+ assert_eq!(is_whitespace("a"), false);
+ assert_eq!(is_whitespace("?"), false);
+ }
+
+ #[test]
+ fn is_word_test() {
+ assert_eq!(is_word("a"), true);
+ assert_eq!(is_word("a "), true);
+ assert_eq!(is_word("a5"), true);
+ assert_eq!(is_word("a5a"), true);
+
+ assert_eq!(is_word("5"), false);
+ assert_eq!(is_word("5a"), false);
+ assert_eq!(is_word("5a5"), false);
+ }
+
+ #[test]
+ fn is_string_test() {
+ assert_eq!(is_string("\"\""), true);
+ assert_eq!(is_string("\"a\""), true);
+ assert_eq!(is_string("\"a\" "), true);
+ assert_eq!(is_string("\"a\"5"), true);
+ assert_eq!(is_string("'a'5"), true);
+ assert_eq!(is_string("\"with escaped \\\" backslash\""), true);
+
+ assert_eq!(is_string("\""), false);
+ assert_eq!(is_string("\"a"), false);
+ assert_eq!(is_string("a\""), false);
+ assert_eq!(is_string(" \"a\""), false);
+ assert_eq!(is_string("5\"a\""), false);
+ assert_eq!(is_string("a + 'str'"), false);
+ }
+
+ #[test]
+ fn is_operator_test() {
+ assert_eq!(is_operator("+"), true);
+ assert_eq!(is_operator("+ "), true);
+ assert_eq!(is_operator("-"), true);
+ assert_eq!(is_operator("<="), true);
+ assert_eq!(is_operator("<= "), true);
+ assert_eq!(is_operator(">="), true);
+ assert_eq!(is_operator(">= "), true);
+ assert_eq!(is_operator("> "), true);
+ assert_eq!(is_operator("< "), true);
+ assert_eq!(is_operator("| "), true);
+ assert_eq!(is_operator("|> "), true);
+ assert_eq!(is_operator("^ "), true);
+ assert_eq!(is_operator("% "), true);
+ assert_eq!(is_operator("+* "), true);
+
+ assert_eq!(is_operator("5 + 5"), false);
+ assert_eq!(is_operator("a"), false);
+ assert_eq!(is_operator("a+"), false);
+ assert_eq!(is_operator("a+5"), false);
+ assert_eq!(is_operator("5a+5"), false);
+ assert_eq!(is_operator(", newVar"), false);
+ assert_eq!(is_operator(","), false);
+ }
+
+ #[test]
+ fn is_block_start_test() {
+ assert_eq!(is_block_start("{"), true);
+ assert_eq!(is_block_start("{ "), true);
+ assert_eq!(is_block_start("{5"), true);
+ assert_eq!(is_block_start("{a"), true);
+ assert_eq!(is_block_start("{5 "), true);
+
+ assert_eq!(is_block_start("5"), false);
+ assert_eq!(is_block_start("5 + 5"), false);
+ assert_eq!(is_block_start("5{ + 5"), false);
+ assert_eq!(is_block_start("a{ + 5"), false);
+ assert_eq!(is_block_start(" { + 5"), false);
+ }
+
+ #[test]
+ fn is_block_end_test() {
+ assert_eq!(is_block_end("}"), true);
+ assert_eq!(is_block_end("} "), true);
+ assert_eq!(is_block_end("}5"), true);
+ assert_eq!(is_block_end("}5 "), true);
+
+ assert_eq!(is_block_end("5"), false);
+ assert_eq!(is_block_end("5 + 5"), false);
+ assert_eq!(is_block_end("5} + 5"), false);
+ assert_eq!(is_block_end(" } + 5"), false);
+ }
+
+ #[test]
+ fn is_paran_start_test() {
+ assert_eq!(is_paran_start("("), true);
+ assert_eq!(is_paran_start("( "), true);
+ assert_eq!(is_paran_start("(5"), true);
+ assert_eq!(is_paran_start("(5 "), true);
+ assert_eq!(is_paran_start("(5 + 5"), true);
+ assert_eq!(is_paran_start("(5 + 5)"), true);
+ assert_eq!(is_paran_start("(5 + 5) "), true);
+
+ assert_eq!(is_paran_start("5"), false);
+ assert_eq!(is_paran_start("5 + 5"), false);
+ assert_eq!(is_paran_start("5( + 5)"), false);
+ assert_eq!(is_paran_start(" ( + 5)"), false);
+ }
+
+ #[test]
+ fn is_paran_end_test() {
+ assert_eq!(is_paran_end(")"), true);
+ assert_eq!(is_paran_end(") "), true);
+ assert_eq!(is_paran_end(")5"), true);
+ assert_eq!(is_paran_end(")5 "), true);
+
+ assert_eq!(is_paran_end("5"), false);
+ assert_eq!(is_paran_end("5 + 5"), false);
+ assert_eq!(is_paran_end("5) + 5"), false);
+ assert_eq!(is_paran_end(" ) + 5"), false);
+ }
+
+ #[test]
+ fn is_comma_test() {
+ assert_eq!(is_comma(","), true);
+ assert_eq!(is_comma(", "), true);
+ assert_eq!(is_comma(",5"), true);
+ assert_eq!(is_comma(",5 "), true);
+
+ assert_eq!(is_comma("5"), false);
+ assert_eq!(is_comma("5 + 5"), false);
+ assert_eq!(is_comma("5, + 5"), false);
+ assert_eq!(is_comma(" , + 5"), false);
+ }
+
+ #[test]
+ fn is_line_comment_test() {
+ assert_eq!(is_line_comment("//"), true);
+ assert_eq!(is_line_comment("// "), true);
+ assert_eq!(is_line_comment("//5"), true);
+ assert_eq!(is_line_comment("//5 "), true);
+
+ assert_eq!(is_line_comment("5"), false);
+ assert_eq!(is_line_comment("5 + 5"), false);
+ assert_eq!(is_line_comment("5// + 5"), false);
+ assert_eq!(is_line_comment(" // + 5"), false);
+ }
+
+ #[test]
+ fn is_block_comment_test() {
+ assert_eq!(is_block_comment("/* */"), true);
+ assert_eq!(is_block_comment("/***/"), true);
+ assert_eq!(is_block_comment("/*5*/"), true);
+ assert_eq!(is_block_comment("/*5 */"), true);
+
+ assert_eq!(is_block_comment("/*"), false);
+ assert_eq!(is_block_comment("5"), false);
+ assert_eq!(is_block_comment("5 + 5"), false);
+ assert_eq!(is_block_comment("5/* + 5"), false);
+ assert_eq!(is_block_comment(" /* + 5"), false);
+ }
+
+ #[test]
+ fn make_token_test() {
+ assert_eq!(make_token(TokenType::Word, &"const".to_string(), 56), Token {
+ token_type: TokenType::Word,
+ value: "const".to_string(),
+ start: 56,
+ end: 61,
+ });
+ }
+
+ #[test]
+ fn return_token_at_index_test() {
+ assert_eq!(return_token_at_index("const", 0), Some(Token {
+ token_type: TokenType::Word,
+ value: "const".to_string(),
+ start: 0,
+ end: 5,
+ }));
+ assert_eq!(return_token_at_index(" 4554", 2),
+ Some(Token {
+ token_type: TokenType::Number,
+ value: "4554".to_string(),
+ start: 2,
+ end: 6,
+ })
+ );
+ }
+
+ #[test]
+ fn lexer_test() {
+ assert_eq!(lexer("const a=5"), vec![
+ Token {
+ token_type: TokenType::Word,
+ value: "const".to_string(),
+ start: 0,
+ end: 5,
+ },
+ Token {
+ token_type: TokenType::Whitespace,
+ value: " ".to_string(),
+ start: 5,
+ end: 6,
+ },
+ Token {
+ token_type: TokenType::Word,
+ value: "a".to_string(),
+ start: 6,
+ end: 7,
+ },
+ Token {
+ token_type: TokenType::Operator,
+ value: "=".to_string(),
+ start: 7,
+ end: 8,
+ },
+ Token {
+ token_type: TokenType::Number,
+ value: "5".to_string(),
+ start: 8,
+ end: 9,
+ },
+ ]);
+ assert_eq!(lexer("54 + 22500 + 6"), vec![
+ Token {
+ token_type: TokenType::Number,
+ value: "54".to_string(),
+ start: 0,
+ end: 2,
+ },
+ Token {
+ token_type: TokenType::Whitespace,
+ value: " ".to_string(),
+ start: 2,
+ end: 3,
+ },
+ Token {
+ token_type: TokenType::Operator,
+ value: "+".to_string(),
+ start: 3,
+ end: 4,
+ },
+ Token {
+ token_type: TokenType::Whitespace,
+ value: " ".to_string(),
+ start: 4,
+ end: 5,
+ },
+ Token {
+ token_type: TokenType::Number,
+ value: "22500".to_string(),
+ start: 5,
+ end: 10,
+ },
+ Token {
+ token_type: TokenType::Whitespace,
+ value: " ".to_string(),
+ start: 10,
+ end: 11,
+ },
+ Token {
+ token_type: TokenType::Operator,
+ value: "+".to_string(),
+ start: 11,
+ end: 12,
+ },
+ Token {
+ token_type: TokenType::Whitespace,
+ value: " ".to_string(),
+ start: 12,
+ end: 13,
+ },
+ Token {
+ token_type: TokenType::Number,
+ value: "6".to_string(),
+ start: 13,
+ end: 14,
+ },
+ ]);
+ }
+}
diff --git a/yarn.lock b/yarn.lock
index fb1e6cf6e..1b5be7f79 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3228,6 +3228,13 @@ ast-types-flow@^0.0.7:
resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad"
integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0=
+async@^2.6.4:
+ version "2.6.4"
+ resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221"
+ integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==
+ dependencies:
+ lodash "^4.17.14"
+
async@^3.2.3:
version "3.2.4"
resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c"
@@ -3265,6 +3272,13 @@ axe-core@^4.4.3:
resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.5.1.tgz#04d561c11b6d76d096d34e9d14ba2c294fb20cdc"
integrity sha512-1exVbW0X1O/HSr/WMwnaweyqcWOgZgLiVxdLG34pvSQk4NlYQr9OUy0JLwuhFfuVNQzzqgH57eYzkFBCb3bIsQ==
+axios@^0.21.1:
+ version "0.21.4"
+ resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575"
+ integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
+ dependencies:
+ follow-redirects "^1.14.0"
+
axobject-query@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be"
@@ -3416,6 +3430,13 @@ base16@^1.0.0:
resolved "https://registry.yarnpkg.com/base16/-/base16-1.0.0.tgz#e297f60d7ec1014a7a971a39ebc8a98c0b681e70"
integrity sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==
+basic-auth@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/basic-auth/-/basic-auth-2.0.1.tgz#b998279bf47ce38344b4f3cf916d4679bbf51e3a"
+ integrity sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==
+ dependencies:
+ safe-buffer "5.1.2"
+
batch@0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16"
@@ -3448,6 +3469,15 @@ binary-extensions@^2.0.0:
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c"
integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==
+binary-install@^0.1.0:
+ version "0.1.1"
+ resolved "https://registry.yarnpkg.com/binary-install/-/binary-install-0.1.1.tgz#c1b22f174581764e5c52cd16664cf1d287e38bd4"
+ integrity sha512-DqED0D/6LrS+BHDkKn34vhRqOGjy5gTMgvYZsGK2TpNbdPuz4h+MRlNgGv5QBRd7pWq/jylM4eKNCizgAq3kNQ==
+ dependencies:
+ axios "^0.21.1"
+ rimraf "^3.0.2"
+ tar "^6.1.0"
+
bluebird@^3.5.5:
version "3.7.2"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
@@ -3688,6 +3718,11 @@ chokidar@^3.4.2, chokidar@^3.5.3:
optionalDependencies:
fsevents "~2.3.2"
+chownr@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece"
+ integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==
+
chrome-trace-event@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4"
@@ -3920,6 +3955,11 @@ core-util-is@~1.0.0:
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
+corser@^2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/corser/-/corser-2.0.1.tgz#8eda252ecaab5840dcd975ceb90d9370c819ff87"
+ integrity sha512-utCYNzRSQIZNPIcGZdQc92UVJYAhtGAteCFg0yRaFm8f0P+CPtyGyHXJcGXnffjCybUCEx3FQ2G7U3/o9eIkVQ==
+
cosmiconfig@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982"
@@ -5249,6 +5289,11 @@ follow-redirects@^1.0.0:
dependencies:
debug "^3.0.0"
+follow-redirects@^1.14.0:
+ version "1.15.2"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
+ integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==
+
for-each@^0.3.3:
version "0.3.3"
resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e"
@@ -5318,6 +5363,13 @@ fs-extra@^9.0.0, fs-extra@^9.0.1:
jsonfile "^6.0.1"
universalify "^2.0.0"
+fs-minipass@^2.0.0:
+ version "2.1.0"
+ resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
+ integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==
+ dependencies:
+ minipass "^3.0.0"
+
fs-monkey@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3"
@@ -5588,6 +5640,13 @@ html-encoding-sniffer@^2.0.1:
dependencies:
whatwg-encoding "^1.0.5"
+html-encoding-sniffer@^3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz#2cb1a8cf0db52414776e5b2a7a04d5dd98158de9"
+ integrity sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==
+ dependencies:
+ whatwg-encoding "^2.0.0"
+
html-entities@^2.1.0, html-entities@^2.3.2:
version "2.3.3"
resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.3.tgz#117d7626bece327fc8baace8868fa6f5ef856e46"
@@ -5697,6 +5756,25 @@ http-proxy@^1.18.1:
follow-redirects "^1.0.0"
requires-port "^1.0.0"
+http-server@^14.1.1:
+ version "14.1.1"
+ resolved "https://registry.yarnpkg.com/http-server/-/http-server-14.1.1.tgz#d60fbb37d7c2fdff0f0fbff0d0ee6670bd285e2e"
+ integrity sha512-+cbxadF40UXd9T01zUHgA+rlo2Bg1Srer4+B4NwIHdaGxAGGv59nYRnGGDJ9LBk7alpS0US+J+bLLdQOOkJq4A==
+ dependencies:
+ basic-auth "^2.0.1"
+ chalk "^4.1.2"
+ corser "^2.0.1"
+ he "^1.2.0"
+ html-encoding-sniffer "^3.0.0"
+ http-proxy "^1.18.1"
+ mime "^1.6.0"
+ minimist "^1.2.6"
+ opener "^1.5.1"
+ portfinder "^1.0.28"
+ secure-compare "3.0.1"
+ union "~0.5.0"
+ url-join "^4.0.1"
+
https-proxy-agent@^5.0.0:
version "5.0.1"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6"
@@ -5717,7 +5795,7 @@ iconv-lite@0.4.24:
dependencies:
safer-buffer ">= 2.1.2 < 3"
-iconv-lite@^0.6.3:
+iconv-lite@0.6.3, iconv-lite@^0.6.3:
version "0.6.3"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501"
integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==
@@ -5832,7 +5910,7 @@ ipaddr.js@^2.0.1:
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0"
integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng==
-is-arguments@^1.1.0, is-arguments@^1.1.1:
+is-arguments@^1.0.4, is-arguments@^1.1.0, is-arguments@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b"
integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==
@@ -5921,6 +5999,13 @@ is-generator-fn@^2.0.0:
resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118"
integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==
+is-generator-function@^1.0.7:
+ version "1.0.10"
+ resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72"
+ integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==
+ dependencies:
+ has-tostringtag "^1.0.0"
+
is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
@@ -6050,7 +6135,7 @@ is-symbol@^1.0.3:
dependencies:
has-symbols "^1.0.2"
-is-typed-array@^1.1.10:
+is-typed-array@^1.1.10, is-typed-array@^1.1.3:
version "1.1.10"
resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.10.tgz#36a5b5cb4189b575d1a3e4b08536bfb485801e3f"
integrity sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==
@@ -6999,7 +7084,7 @@ lodash.uniq@^4.5.0:
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
-lodash@4.17.21, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0:
+lodash@4.17.21, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.7.0:
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@@ -7142,7 +7227,7 @@ mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.34:
dependencies:
mime-db "1.52.0"
-mime@1.6.0:
+mime@1.6.0, mime@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
@@ -7200,6 +7285,38 @@ minimist@^1.2.6:
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18"
integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==
+minipass@^3.0.0:
+ version "3.3.6"
+ resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
+ integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
+ dependencies:
+ yallist "^4.0.0"
+
+minipass@^4.0.0:
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/minipass/-/minipass-4.0.3.tgz#00bfbaf1e16e35e804f4aa31a7c1f6b8d9f0ee72"
+ integrity sha512-OW2r4sQ0sI+z5ckEt5c1Tri4xTgZwYDxpE54eqWlQloQRoWtXjqt9udJ5Z4dSv7wK+nfFI7FRXyCpBSft+gpFw==
+
+minizlib@^2.1.1:
+ version "2.1.2"
+ resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
+ integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==
+ dependencies:
+ minipass "^3.0.0"
+ yallist "^4.0.0"
+
+mkdirp@^0.5.6:
+ version "0.5.6"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6"
+ integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==
+ dependencies:
+ minimist "^1.2.6"
+
+mkdirp@^1.0.3:
+ version "1.0.4"
+ resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e"
+ integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==
+
mkdirp@~0.5.1:
version "0.5.3"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.3.tgz#5a514b7179259287952881e94410ec5465659f8c"
@@ -7489,6 +7606,11 @@ open@^8.0.9, open@^8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"
+opener@^1.5.1:
+ version "1.5.2"
+ resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
+ integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==
+
opentype.js@^1.3.3:
version "1.3.4"
resolved "https://registry.yarnpkg.com/opentype.js/-/opentype.js-1.3.4.tgz#1c0e72e46288473cc4a4c6a2dc60fd7fe6020d77"
@@ -7711,6 +7833,15 @@ pkg-up@^3.1.0:
dependencies:
find-up "^3.0.0"
+portfinder@^1.0.28:
+ version "1.0.32"
+ resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.32.tgz#2fe1b9e58389712429dc2bea5beb2146146c7f81"
+ integrity sha512-on2ZJVVDXRADWE6jnQaX0ioEylzgBpQk8r55NE4wjXW1ZxO+BgDlY6DXwj20i0V8eB4SenDQ00WEaxfiIQPcxg==
+ dependencies:
+ async "^2.6.4"
+ debug "^3.2.7"
+ mkdirp "^0.5.6"
+
postcss-attribute-case-insensitive@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-5.0.2.tgz#03d761b24afc04c09e757e92ff53716ae8ea2741"
@@ -8419,7 +8550,7 @@ q@^1.1.2:
resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=
-qs@6.11.0:
+qs@6.11.0, qs@^6.4.0:
version "6.11.0"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
@@ -9066,6 +9197,11 @@ schema-utils@^4.0.0:
ajv-formats "^2.1.1"
ajv-keywords "^5.0.0"
+secure-compare@3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/secure-compare/-/secure-compare-3.0.1.tgz#f1a0329b308b221fae37b9974f3d578d0ca999e3"
+ integrity sha512-AckIIV90rPDcBcglUwXPF3kg0P0qmPsPXAj6BBEENQE1p5yA1xfmDJzfi1Tappj37Pv2mVbKpL3Z1T+Nn7k1Qw==
+
select-hose@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca"
@@ -9683,6 +9819,18 @@ tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0:
resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0"
integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==
+tar@^6.1.0:
+ version "6.1.13"
+ resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"
+ integrity sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==
+ dependencies:
+ chownr "^2.0.0"
+ fs-minipass "^2.0.0"
+ minipass "^4.0.0"
+ minizlib "^2.1.1"
+ mkdirp "^1.0.3"
+ yallist "^4.0.0"
+
temp-dir@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-2.0.0.tgz#bde92b05bdfeb1516e804c9c00ad45177f31321e"
@@ -10006,6 +10154,13 @@ unicode-property-aliases-ecmascript@^2.0.0:
resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd"
integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==
+union@~0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/union/-/union-0.5.0.tgz#b2c11be84f60538537b846edb9ba266ba0090075"
+ integrity sha512-N6uOhuW6zO95P3Mel2I2zMsbsanvvtgn6jVqJv4vbVcz/JN0OkL9suomjQGmWtxJQXOCqUJvquc1sMeNz/IwlA==
+ dependencies:
+ qs "^6.4.0"
+
uniq@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff"
@@ -10058,6 +10213,11 @@ uri-js@^4.2.2:
dependencies:
punycode "^2.1.0"
+url-join@^4.0.1:
+ version "4.0.1"
+ resolved "https://registry.yarnpkg.com/url-join/-/url-join-4.0.1.tgz#b642e21a2646808ffa178c4c5fda39844e12cde7"
+ integrity sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==
+
url-parse@^1.5.3:
version "1.5.10"
resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1"
@@ -10110,6 +10270,17 @@ util.promisify@~1.0.0:
has-symbols "^1.0.1"
object.getownpropertydescriptors "^2.1.0"
+util@^0.12.5:
+ version "0.12.5"
+ resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc"
+ integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==
+ dependencies:
+ inherits "^2.0.3"
+ is-arguments "^1.0.4"
+ is-generator-function "^1.0.7"
+ is-typed-array "^1.1.3"
+ which-typed-array "^1.1.2"
+
utila@~0.4:
version "0.4.0"
resolved "https://registry.yarnpkg.com/utila/-/utila-0.4.0.tgz#8a16a05d445657a3aea5eecc5b12a4fa5379772c"
@@ -10170,6 +10341,13 @@ walker@^1.0.7:
dependencies:
makeerror "1.0.x"
+wasm-pack@^0.10.3:
+ version "0.10.3"
+ resolved "https://registry.yarnpkg.com/wasm-pack/-/wasm-pack-0.10.3.tgz#2d7dd78ba539c34b3817e2249c3f30c646c84b69"
+ integrity sha512-dg1PPyp+QwWrhfHsgG12K/y5xzwfaAoK1yuVC/DUAuQsDy5JywWDuA7Y/ionGwQz+JBZVw8jknaKBnaxaJfwTA==
+ dependencies:
+ binary-install "^0.1.0"
+
watchpack@^2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d"
@@ -10355,6 +10533,13 @@ whatwg-encoding@^1.0.5:
dependencies:
iconv-lite "0.4.24"
+whatwg-encoding@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz#e7635f597fd87020858626805a2729fa7698ac53"
+ integrity sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==
+ dependencies:
+ iconv-lite "0.6.3"
+
whatwg-fetch@^3.6.2:
version "3.6.2"
resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c"
@@ -10412,7 +10597,7 @@ which-collection@^1.0.1:
is-weakmap "^2.0.1"
is-weakset "^2.0.1"
-which-typed-array@^1.1.8:
+which-typed-array@^1.1.2, which-typed-array@^1.1.8:
version "1.1.9"
resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.9.tgz#307cf898025848cf995e795e8423c7f337efbde6"
integrity sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==