start of fuzzing (#405)
* fuzzing
* more tests
* unicode
* more fixes
* fix clippy
* updates
* updates
* fixes
* one more
* one more
* last one

---------

Signed-off-by: Jess Frazelle <github@jessfraz.com>
README.md (+21)

@@ -86,3 +86,24 @@ The PR may serve as a place to discuss the human-readable changelog and extra QA
3. Create a new release and tag pointing to the bump version commit using semantic versioning `v{x}.{y}.{z}`

4. A new Action kicks in at https://github.com/KittyCAD/modeling-app/actions, uploading artifacts to the release
+
+## Fuzzing the parser
+
+Make sure you install cargo fuzz:
+
+```bash
+$ cargo install cargo-fuzz
+```
+
+```bash
+$ cd src/wasm-lib/kcl
+
+# list the fuzz targets
+$ cargo fuzz list
+
+# run the parser fuzzer
+$ cargo +nightly fuzz run parser
+```
+
+For more information on fuzzing you can check out
+[this guide](https://rust-fuzz.github.io/book/cargo-fuzz.html).
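Each name printed by `cargo fuzz list` corresponds to a small `libfuzzer-sys` harness under `fuzz/fuzz_targets/`. As a rough sketch of what such a harness looks like (the actual `parser` target added by this commit appears further down in this diff; the `parse` helper below is a hypothetical stand-in, not a real kcl-lib function):

```rust
// Minimal sketch of a libfuzzer-sys harness. `parse` is a hypothetical
// stand-in for the entry point under test.
#![no_main]
use libfuzzer_sys::fuzz_target;

fn parse(_input: &str) -> Result<(), String> {
    Ok(())
}

fuzz_target!(|data: &[u8]| {
    // libFuzzer hands the harness arbitrary bytes; only valid UTF-8 is
    // forwarded to the code under test.
    if let Ok(s) = std::str::from_utf8(data) {
        let _ = parse(s);
    }
});
```

The fuzzer treats panics and crashes as findings; ordinary `Err` return values are fine, so the harness does not need to assert anything about the parse result.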
src/wasm-lib/kcl/fuzz/.gitignore (new file, vendored, +4)

@@ -0,0 +1,4 @@
target
corpus
artifacts
coverage
src/wasm-lib/kcl/fuzz/Cargo.lock (new file, generated, +2218)

File diff suppressed because it is too large.
src/wasm-lib/kcl/fuzz/Cargo.toml (new file, +27)

@@ -0,0 +1,27 @@
[package]
name = "kcl-lib-fuzz"
version = "0.0.0"
publish = false
edition = "2021"

[package.metadata]
cargo-fuzz = true

[dependencies]
libfuzzer-sys = "0.4"

[dependencies.kcl-lib]
path = ".."

# Prevent this from interfering with workspaces
[workspace]
members = ["."]

[profile.release]
debug = 1

[[bin]]
name = "parser"
path = "fuzz_targets/parser.rs"
test = false
doc = false
src/wasm-lib/kcl/fuzz/fuzz_targets/parser.rs (new file, +14)

@@ -0,0 +1,14 @@
#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
extern crate kcl_lib;

fuzz_target!(|data: &[u8]| {
    if let Ok(s) = std::str::from_utf8(data) {
        let tokens = kcl_lib::tokeniser::lexer(s);
        let parser = kcl_lib::parser::Parser::new(tokens);
        if let Ok(_) = parser.ast() {
            println!("OK");
        }
    }
});
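This harness only checks that lexing and parsing arbitrary UTF-8 never panics; an `Err` from `parser.ast()` is an acceptable outcome, and only crashes or panics count as findings. Inputs that libFuzzer flags are saved under `fuzz/artifacts/` (ignored by the new `.gitignore` above) and can then be promoted to ordinary regression tests, which is the pattern the new `test_parse_*` tests later in this commit follow. A sketch of that promotion step, reusing the `>!` input that the commit's own `test_parse_greater_bang` covers (the test name here is illustrative):

```rust
// Sketch: a fuzzer-found input checked in as a regression test.
// The test name is a placeholder; the input mirrors test_parse_greater_bang.
#[test]
fn test_parse_fuzz_regression_greater_bang() {
    let tokens = crate::tokeniser::lexer(">!");
    let parser = Parser::new(tokens);
    // Ok or Err are both fine; the requirement is simply "no panic".
    let _ = parser.ast();
}
```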
@@ -642,7 +642,7 @@ pub fn execute(
    for (index, param) in function_expression.params.iter().enumerate() {
        fn_memory.add(
            &param.name,
-           args.clone().get(index).unwrap().clone(),
+           args.get(index).unwrap().clone(),
            param.into(),
        )?;
    }
@@ -228,8 +228,8 @@ impl ReversePolishNotation {
                    .collect::<Vec<Token>>(),
            );
            return rpn.parse();
-       } else if current_token.value == ")" {
-           if !self.operators.is_empty() && self.operators[self.operators.len() - 1].value != "(" {
+       } else if current_token.value == ")" && !self.operators.is_empty() {
+           if self.operators[self.operators.len() - 1].value != "(" {
                // pop operators off the stack and push them to postFix until we find the matching '('
                let rpn = ReversePolishNotation::new(
                    &self.parser.tokens,
@@ -170,13 +170,6 @@ impl Parser {
            }));
        }

-       if index >= self.tokens.len() {
-           return Err(KclError::Syntax(KclErrorDetails {
-               source_ranges: vec![self.tokens.last().unwrap().into()],
-               message: "unexpected end".to_string(),
-           }));
-       }
-
        let Some(token) = self.tokens.get(index) else {
            return Err(KclError::Syntax(KclErrorDetails {
                source_ranges: vec![self.tokens.last().unwrap().into()],
@@ -354,6 +347,15 @@ impl Parser {
    }

    fn next_meaningful_token(&self, index: usize, offset: Option<usize>) -> Result<TokenReturnWithNonCode, KclError> {
+       // There is no next meaningful token.
+       if index >= self.tokens.len() - 1 {
+           return Ok(TokenReturnWithNonCode {
+               token: None,
+               index: self.tokens.len() - 1,
+               non_code_node: None,
+           });
+       }
+
        let new_index = index + offset.unwrap_or(1);
        let Ok(token) = self.get_token(new_index) else {
            return Ok(TokenReturnWithNonCode {
@@ -412,7 +414,7 @@ impl Parser {
        if found_another_opening_brace {
            return self.find_closing_brace(index + 1, brace_count + 1, search_opening_brace);
        }
-       if found_another_closing_brace {
+       if found_another_closing_brace && brace_count > 0 {
            return self.find_closing_brace(index + 1, brace_count - 1, search_opening_brace);
        }
        // non-brace token, increment and continue
@@ -617,6 +619,12 @@ impl Parser {
    fn make_member_expression(&self, index: usize) -> Result<MemberExpressionReturn, KclError> {
        let current_token = self.get_token(index)?;
        let mut keys_info = self.collect_object_keys(index, None)?;
+       if keys_info.is_empty() {
+           return Err(KclError::Syntax(KclErrorDetails {
+               source_ranges: vec![current_token.into()],
+               message: "expected to be started on a identifier or literal".to_string(),
+           }));
+       }
        let last_key = keys_info[keys_info.len() - 1].clone();
        let first_key = keys_info.remove(0);
        let root = self.make_identifier(index)?;
@@ -858,6 +866,8 @@ impl Parser {
    fn make_array_expression(&self, index: usize) -> Result<ArrayReturn, KclError> {
        let opening_brace_token = self.get_token(index)?;
        let first_element_token = self.next_meaningful_token(index, None)?;
+       // Make sure there is a closing brace.
+       let _closing_brace = self.find_closing_brace(index, 0, "")?;
        let array_elements = self.make_array_elements(first_element_token.index, Vec::new())?;
        Ok(ArrayReturn {
            expression: ArrayExpression {
@@ -1029,7 +1039,7 @@ impl Parser {
        } else {
            return Err(KclError::Unimplemented(KclErrorDetails {
                source_ranges: vec![argument_token_token.clone().into()],
-               message: format!("Unexpected token {} ", argument_token_token.value),
+               message: format!("Unexpected token {}", argument_token_token.value),
            }));
        };
    }
@@ -1054,18 +1064,18 @@ impl Parser {

            Err(KclError::Unimplemented(KclErrorDetails {
                source_ranges: vec![argument_token_token.clone().into()],
-               message: format!("Unexpected token {} ", argument_token_token.value),
+               message: format!("Unexpected token {}", argument_token_token.value),
            }))
        } else {
            Err(KclError::Unimplemented(KclErrorDetails {
                source_ranges: vec![brace_or_comma_token.into()],
-               message: format!("Unexpected token {} ", brace_or_comma_token.value),
+               message: format!("Unexpected token {}", brace_or_comma_token.value),
            }))
        }
    } else {
        Err(KclError::Unimplemented(KclErrorDetails {
            source_ranges: vec![brace_or_comma_token.into()],
-           message: format!("Unexpected token {} ", brace_or_comma_token.value),
+           message: format!("Unexpected token {}", brace_or_comma_token.value),
        }))
    }
}
@@ -1074,6 +1084,8 @@ impl Parser {
        let current_token = self.get_token(index)?;
        let brace_token = self.next_meaningful_token(index, None)?;
        let callee = self.make_identifier(index)?;
+       // Make sure there is a closing brace.
+       let _closing_brace_token = self.find_closing_brace(brace_token.index, 0, "")?;
        let args = self.make_arguments(brace_token.index, vec![])?;
        let closing_brace_token = self.get_token(args.last_index)?;
        let function = if let Some(stdlib_fn) = self.stdlib.get(&callee.name) {
@@ -1119,7 +1131,7 @@ impl Parser {
        let Some(assignment_token) = assignment.token else {
            return Err(KclError::Unimplemented(KclErrorDetails {
                source_ranges: vec![current_token.clone().into()],
-               message: format!("Unexpected token {} ", current_token.value),
+               message: format!("Unexpected token {}", current_token.value),
            }));
        };

@@ -1195,7 +1207,7 @@ impl Parser {
        } else {
            Err(KclError::Unimplemented(KclErrorDetails {
                source_ranges: vec![brace_or_comma_token.into()],
-               message: format!("Unexpected token {} ", brace_or_comma_token.value),
+               message: format!("Unexpected token {}", brace_or_comma_token.value),
            }))
        }
    }
@@ -1203,6 +1215,12 @@ impl Parser {
    fn make_unary_expression(&self, index: usize) -> Result<UnaryExpressionResult, KclError> {
        let current_token = self.get_token(index)?;
        let next_token = self.next_meaningful_token(index, None)?;
+       if next_token.token.is_none() {
+           return Err(KclError::Syntax(KclErrorDetails {
+               source_ranges: vec![current_token.into()],
+               message: "expected another token".to_string(),
+           }));
+       }
        let argument = self.make_value(next_token.index)?;
        let argument_token = self.get_token(argument.last_index)?;
        Ok(UnaryExpressionResult {
@@ -1243,7 +1261,6 @@ impl Parser {
        return Ok(ExpressionStatementResult {
            expression: ExpressionStatement {
                start: current_token.start,
-               // end: call_expression.last_index,
                end,
                expression: Value::CallExpression(Box::new(call_expression.expression)),
            },
@@ -1325,6 +1342,8 @@ impl Parser {

    fn make_object_expression(&self, index: usize) -> Result<ObjectExpressionResult, KclError> {
        let opening_brace_token = self.get_token(index)?;
+       // Make sure there is a closing brace.
+       let _closing_brace = self.find_closing_brace(index, 0, "")?;
        let first_property_token = self.next_meaningful_token(index, None)?;
        let object_properties = self.make_object_properties(first_property_token.index, vec![])?;
        Ok(ObjectExpressionResult {
@@ -2762,4 +2781,104 @@ const secondExtrude = startSketchAt([0,0])
        assert!(result.is_err());
        assert!(result.err().unwrap().to_string().contains("Unexpected token"));
    }
+
+   #[test]
+   fn test_parse_greater_bang() {
+       let tokens = crate::tokeniser::lexer(">!");
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_ok());
+   }
+
+   #[test]
+   fn test_parse_z_percent_parens() {
+       let tokens = crate::tokeniser::lexer("z%)");
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result.err().unwrap().to_string().contains("Unexpected token"));
+   }
+
+   #[test]
+   fn test_parse_parens_unicode() {
+       let tokens = crate::tokeniser::lexer("(ޜ");
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_ok());
+   }
+
+   #[test]
+   fn test_parse_nested_open_brackets() {
+       let tokens = crate::tokeniser::lexer(
+           r#"
+z(-[["#,
+       );
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result.err().unwrap().to_string().contains("unexpected end"));
+   }
+
+   #[test]
+   fn test_parse_weird_new_line_function() {
+       let tokens = crate::tokeniser::lexer(
+           r#"z
+(--#"#,
+       );
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result.err().unwrap().to_string().contains("unexpected end"));
+   }
+
+   #[test]
+   fn test_parse_weird_lots_of_fancy_brackets() {
+       let tokens = crate::tokeniser::lexer(r#"zz({{{{{{{{)iegAng{{{{{{{##"#);
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result.err().unwrap().to_string().contains("unexpected end"));
+   }
+
+   #[test]
+   fn test_parse_weird_close_before_open() {
+       let tokens = crate::tokeniser::lexer(
+           r#"fn)n
+e
+["#,
+       );
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result
+           .err()
+           .unwrap()
+           .to_string()
+           .contains("expected to be started on a identifier or literal"));
+   }
+
+   #[test]
+   fn test_parse_weird_close_before_nada() {
+       let tokens = crate::tokeniser::lexer(r#"fn)n-"#);
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result.err().unwrap().to_string().contains("expected another token"));
+   }
+
+   #[test]
+   fn test_parse_weird_lots_of_slashes() {
+       let tokens = crate::tokeniser::lexer(
+           r#"J///////////o//+///////////P++++*++++++P///////˟
+++4"#,
+       );
+       let parser = Parser::new(tokens);
+       let result = parser.ast();
+       assert!(result.is_err());
+       assert!(result
+           .err()
+           .unwrap()
+           .to_string()
+           .contains("unexpected end of expression"));
+   }
}
@@ -220,7 +220,7 @@ fn make_token(token_type: TokenType, value: &str, start: usize) -> Token {
}

fn return_token_at_index(s: &str, start_index: usize) -> Option<Token> {
-   let str_from_index = &s[start_index..];
+   let str_from_index = &s.chars().skip(start_index).collect::<String>();
    if is_string(str_from_index) {
        return Some(make_token(
            TokenType::String,
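The tokeniser change above swaps raw byte slicing for a character-based skip because indexing a `&str` by byte offset panics whenever the offset lands inside a multi-byte UTF-8 character, which is the kind of input the fuzzer (and the new `test_parse_parens_unicode` test) feeds in. A standalone demonstration, not part of this commit:

```rust
// Standalone demonstration (not part of this commit) of why byte-offset
// slicing of a &str can panic while iterating over chars cannot.
fn main() {
    let s = "(ޜ"; // '(' is 1 byte, 'ޜ' is 2 bytes in UTF-8
    assert_eq!(s.len(), 3);           // len() counts bytes, not characters
    assert!(!s.is_char_boundary(2));  // byte 2 falls inside the 2-byte char
    // `&s[2..]` would panic with "byte index 2 is not a char boundary";
    // skipping by characters sidesteps the problem entirely:
    let rest: String = s.chars().skip(1).collect();
    assert_eq!(rest, "ޜ");
}
```

Note that collecting into a fresh `String` on every call re-walks the input from the start, so this trades some speed for safety; it keeps the fix minimal.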