Rust style + performance tweaks (#285)

* Rust style tweaks

* Use references instead of cloning

* Further reduce allocations

* Reduce allocations

Author: Adam Chalmers
Date: 2023-08-18 13:23:18 -05:00
Committed by: GitHub
Parent: a6d0f17970
Commit: 3b9094e0dd
4 changed files with 163 additions and 177 deletions
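
The theme running through the diffs below is replacing owning or needlessly specific parameter types with borrows: &[T] instead of &Vec<T>, &str instead of String, and sub-slices instead of .to_vec() copies (the direction clippy's ptr_arg lint also points). A minimal, self-contained sketch of that pattern, illustrative only and not code from this repository:

// Before: &Vec<i64> forces callers to have a whole Vec; passing a sub-range
// would mean allocating a new Vec first.
fn sum_before(values: &Vec<i64>) -> i64 {
    values.iter().sum()
}

// After: &[i64] accepts a borrow of any Vec, array, or sub-slice.
fn sum_after(values: &[i64]) -> i64 {
    values.iter().sum()
}

fn main() {
    let v = vec![1, 2, 3, 4];
    assert_eq!(sum_before(&v), 10);
    // No .to_vec() needed to pass just the tail.
    assert_eq!(sum_after(&v[1..]), 9);
}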

File 1 of 4

@ -3,8 +3,8 @@
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
#[wasm_bindgen] #[wasm_bindgen]
pub fn deserialize_files(data: Vec<u8>) -> Result<JsValue, JsError> { pub fn deserialize_files(data: &[u8]) -> Result<JsValue, JsError> {
let ws_resp: kittycad::types::WebSocketResponses = bincode::deserialize(&data)?; let ws_resp: kittycad::types::WebSocketResponses = bincode::deserialize(data)?;
if let kittycad::types::WebSocketResponses::Export { files } = ws_resp { if let kittycad::types::WebSocketResponses::Export { files } = ws_resp {
return Ok(serde_wasm_bindgen::to_value(&files)?); return Ok(serde_wasm_bindgen::to_value(&files)?);
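
Above, deserialize_files only reads the incoming buffer, so it now takes &[u8] and hands the slice straight to bincode::deserialize instead of taking ownership of a Vec<u8>. A minimal sketch of the borrow-versus-own difference, with hypothetical names:

// Hypothetical reader function: it only inspects the bytes, so it borrows
// them. The caller keeps ownership and nothing is moved to make the call.
fn first_byte(data: &[u8]) -> Option<u8> {
    data.first().copied()
}

fn main() {
    let payload: Vec<u8> = vec![42, 7, 9];
    assert_eq!(first_byte(&payload), Some(42));
    // The Vec is still owned and usable here, because it was only borrowed.
    assert_eq!(payload.len(), 3);
}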

File 2 of 4

@ -19,9 +19,9 @@ pub fn precedence(operator: &str) -> u8 {
} }
pub fn reverse_polish_notation( pub fn reverse_polish_notation(
tokens: &Vec<Token>, tokens: &[Token],
previous_postfix: &Vec<Token>, previous_postfix: &[Token],
operators: &Vec<Token>, operators: &[Token],
) -> Result<Vec<Token>, KclError> { ) -> Result<Vec<Token>, KclError> {
if tokens.is_empty() { if tokens.is_empty() {
return Ok(previous_postfix return Ok(previous_postfix
@ -33,46 +33,46 @@ pub fn reverse_polish_notation(
let current_token = &tokens[0]; let current_token = &tokens[0];
let next = tokens.get(1); let next = tokens.get(1);
if current_token.token_type == TokenType::Word { if current_token.token_type == TokenType::Word {
if let Some(next) = next { return if let Some(next) = next {
if next.token_type == TokenType::Brace && next.value == "(" { if next.token_type == TokenType::Brace && next.value == "(" {
let closing_brace = find_closing_brace(tokens, 1, 0, "")?; let closing_brace = find_closing_brace(tokens, 1, 0, "")?;
return reverse_polish_notation( reverse_polish_notation(
&tokens[closing_brace + 1..].to_vec(), &tokens[closing_brace + 1..],
&previous_postfix &previous_postfix
.iter() .iter()
.cloned() .cloned()
.chain(tokens[0..closing_brace + 1].iter().cloned()) .chain(tokens[0..closing_brace + 1].iter().cloned())
.collect::<Vec<Token>>(), .collect::<Vec<Token>>(),
operators, operators,
); )
} else { } else {
return reverse_polish_notation( reverse_polish_notation(
&tokens[1..].to_vec(), &tokens[1..],
&previous_postfix &previous_postfix
.iter() .iter()
.cloned() .cloned()
.chain(vec![current_token.clone()]) .chain(vec![current_token.clone()])
.collect::<Vec<Token>>(), .collect::<Vec<Token>>(),
operators, operators,
); )
} }
} else { } else {
return reverse_polish_notation( reverse_polish_notation(
&tokens[1..].to_vec(), &tokens[1..],
&previous_postfix &previous_postfix
.iter() .iter()
.cloned() .cloned()
.chain(vec![current_token.clone()]) .chain(vec![current_token.clone()])
.collect::<Vec<Token>>(), .collect::<Vec<Token>>(),
operators, operators,
); )
} };
} else if current_token.token_type == TokenType::Number } else if current_token.token_type == TokenType::Number
|| current_token.token_type == TokenType::Word || current_token.token_type == TokenType::Word
|| current_token.token_type == TokenType::String || current_token.token_type == TokenType::String
{ {
return reverse_polish_notation( return reverse_polish_notation(
&tokens[1..].to_vec(), &tokens[1..],
&previous_postfix &previous_postfix
.iter() .iter()
.cloned() .cloned()
@ -91,11 +91,11 @@ pub fn reverse_polish_notation(
.cloned() .cloned()
.chain(vec![operators[operators.len() - 1].clone()]) .chain(vec![operators[operators.len() - 1].clone()])
.collect::<Vec<Token>>(), .collect::<Vec<Token>>(),
&operators[0..operators.len() - 1].to_vec(), &operators[0..operators.len() - 1],
); );
} }
return reverse_polish_notation( return reverse_polish_notation(
&tokens[1..].to_vec(), &tokens[1..],
previous_postfix, previous_postfix,
&operators &operators
.iter() .iter()
@ -107,7 +107,7 @@ pub fn reverse_polish_notation(
// push current token to both stacks as it is a legitimate operator // push current token to both stacks as it is a legitimate operator
// but later we'll need to pop other operators off the stack until we find the matching ')' // but later we'll need to pop other operators off the stack until we find the matching ')'
return reverse_polish_notation( return reverse_polish_notation(
&tokens[1..].to_vec(), &tokens[1..],
&previous_postfix &previous_postfix
.iter() .iter()
.cloned() .cloned()
@ -129,21 +129,21 @@ pub fn reverse_polish_notation(
.cloned() .cloned()
.chain(vec![operators[operators.len() - 1].clone()]) .chain(vec![operators[operators.len() - 1].clone()])
.collect::<Vec<Token>>(), .collect::<Vec<Token>>(),
&operators[0..operators.len() - 1].to_vec(), &operators[0..operators.len() - 1],
); );
} }
return reverse_polish_notation( return reverse_polish_notation(
&tokens[1..].to_vec(), &tokens[1..],
&previous_postfix &previous_postfix
.iter() .iter()
.cloned() .cloned()
.chain(vec![current_token.clone()]) .chain(vec![current_token.clone()])
.collect::<Vec<Token>>(), .collect::<Vec<Token>>(),
&operators[0..operators.len() - 1].to_vec(), &operators[0..operators.len() - 1],
); );
} }
if is_not_code_token(current_token) { if is_not_code_token(current_token) {
return reverse_polish_notation(&tokens[1..].to_vec(), previous_postfix, operators); return reverse_polish_notation(&tokens[1..], previous_postfix, operators);
} }
Err(KclError::Syntax(KclErrorDetails { Err(KclError::Syntax(KclErrorDetails {
@ -215,7 +215,7 @@ pub enum MathExpression {
} }
fn build_tree( fn build_tree(
reverse_polish_notation_tokens: Vec<Token>, reverse_polish_notation_tokens: &[Token],
stack: Vec<MathExpression>, stack: Vec<MathExpression>,
) -> Result<BinaryExpression, KclError> { ) -> Result<BinaryExpression, KclError> {
if reverse_polish_notation_tokens.is_empty() { if reverse_polish_notation_tokens.is_empty() {
@ -284,18 +284,18 @@ fn build_tree(
start_extended: None, start_extended: None,
}, },
))); )));
return build_tree(reverse_polish_notation_tokens[1..].to_vec(), new_stack); return build_tree(&reverse_polish_notation_tokens[1..], new_stack);
} else if current_token.token_type == TokenType::Word { } else if current_token.token_type == TokenType::Word {
if reverse_polish_notation_tokens[1].token_type == TokenType::Brace if reverse_polish_notation_tokens[1].token_type == TokenType::Brace
&& reverse_polish_notation_tokens[1].value == "(" && reverse_polish_notation_tokens[1].value == "("
{ {
let closing_brace = find_closing_brace(&reverse_polish_notation_tokens, 1, 0, "")?; let closing_brace = find_closing_brace(reverse_polish_notation_tokens, 1, 0, "")?;
let mut new_stack = stack; let mut new_stack = stack;
new_stack.push(MathExpression::CallExpression(Box::new( new_stack.push(MathExpression::CallExpression(Box::new(
make_call_expression(&reverse_polish_notation_tokens, 0)?.expression, make_call_expression(reverse_polish_notation_tokens, 0)?.expression,
))); )));
return build_tree( return build_tree(
reverse_polish_notation_tokens[closing_brace + 1..].to_vec(), &reverse_polish_notation_tokens[closing_brace + 1..],
new_stack, new_stack,
); );
} }
@ -305,7 +305,7 @@ fn build_tree(
start: current_token.start, start: current_token.start,
end: current_token.end, end: current_token.end,
}))); })));
return build_tree(reverse_polish_notation_tokens[1..].to_vec(), new_stack); return build_tree(&reverse_polish_notation_tokens[1..], new_stack);
} else if current_token.token_type == TokenType::Brace && current_token.value == "(" { } else if current_token.token_type == TokenType::Brace && current_token.value == "(" {
let mut new_stack = stack; let mut new_stack = stack;
new_stack.push(MathExpression::ParenthesisToken(Box::new( new_stack.push(MathExpression::ParenthesisToken(Box::new(
@ -316,7 +316,7 @@ fn build_tree(
token_type: MathTokenType::Parenthesis, token_type: MathTokenType::Parenthesis,
}, },
))); )));
return build_tree(reverse_polish_notation_tokens[1..].to_vec(), new_stack); return build_tree(&reverse_polish_notation_tokens[1..], new_stack);
} else if current_token.token_type == TokenType::Brace && current_token.value == ")" { } else if current_token.token_type == TokenType::Brace && current_token.value == ")" {
let inner_node: MathExpression = match &stack[stack.len() - 1] { let inner_node: MathExpression = match &stack[stack.len() - 1] {
MathExpression::ExtendedBinaryExpression(bin_exp) => { MathExpression::ExtendedBinaryExpression(bin_exp) => {
@ -387,7 +387,7 @@ fn build_tree(
}; };
let mut new_stack = stack[0..stack.len() - 2].to_vec(); let mut new_stack = stack[0..stack.len() - 2].to_vec();
new_stack.push(expression); new_stack.push(expression);
return build_tree(reverse_polish_notation_tokens[1..].to_vec(), new_stack); return build_tree(&reverse_polish_notation_tokens[1..], new_stack);
} }
let left: (BinaryPart, usize) = match &stack[stack.len() - 2] { let left: (BinaryPart, usize) = match &stack[stack.len() - 2] {
MathExpression::ExtendedBinaryExpression(bin_exp) => ( MathExpression::ExtendedBinaryExpression(bin_exp) => (
@ -470,12 +470,12 @@ fn build_tree(
let mut new_stack = stack[0..stack.len() - 2].to_vec(); let mut new_stack = stack[0..stack.len() - 2].to_vec();
new_stack.push(MathExpression::BinaryExpression(Box::new(tree))); new_stack.push(MathExpression::BinaryExpression(Box::new(tree)));
build_tree(reverse_polish_notation_tokens[1..].to_vec(), new_stack) build_tree(&reverse_polish_notation_tokens[1..], new_stack)
} }
pub fn parse_expression(tokens: Vec<Token>) -> Result<BinaryExpression, KclError> { pub fn parse_expression(tokens: &[Token]) -> Result<BinaryExpression, KclError> {
let rpn = reverse_polish_notation(&tokens, &vec![], &vec![])?; let rpn = reverse_polish_notation(tokens, &[], &[])?;
let tree_with_maybe_bad_top_level_start_end = build_tree(rpn, vec![])?; let tree_with_maybe_bad_top_level_start_end = build_tree(&rpn, vec![])?;
let left_start = match tree_with_maybe_bad_top_level_start_end.clone().left { let left_start = match tree_with_maybe_bad_top_level_start_end.clone().left {
BinaryPart::BinaryExpression(bin_exp) => bin_exp.start, BinaryPart::BinaryExpression(bin_exp) => bin_exp.start,
BinaryPart::Literal(lit) => lit.start, BinaryPart::Literal(lit) => lit.start,
@ -517,7 +517,7 @@ mod test {
#[test] #[test]
fn test_parse_expression() { fn test_parse_expression() {
let tokens = crate::tokeniser::lexer("1 + 2"); let tokens = crate::tokeniser::lexer("1 + 2");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -543,7 +543,7 @@ mod test {
#[test] #[test]
fn test_parse_expression_plus_followed_by_star() { fn test_parse_expression_plus_followed_by_star() {
let tokens = crate::tokeniser::lexer("1 + 2 * 3"); let tokens = crate::tokeniser::lexer("1 + 2 * 3");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -580,7 +580,7 @@ mod test {
#[test] #[test]
fn test_parse_expression_with_parentheses() { fn test_parse_expression_with_parentheses() {
let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 )"); let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 )");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -617,7 +617,7 @@ mod test {
#[test] #[test]
fn test_parse_expression_parens_in_middle() { fn test_parse_expression_parens_in_middle() {
let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 ) / 4"); let tokens = crate::tokeniser::lexer("1 * ( 2 + 3 ) / 4");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -665,7 +665,7 @@ mod test {
#[test] #[test]
fn test_parse_expression_parans_and_predence() { fn test_parse_expression_parans_and_predence() {
let tokens = crate::tokeniser::lexer("1 + ( 2 + 3 ) / 4"); let tokens = crate::tokeniser::lexer("1 + ( 2 + 3 ) / 4");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -712,7 +712,7 @@ mod test {
#[test] #[test]
fn test_parse_expression_nested() { fn test_parse_expression_nested() {
let tokens = crate::tokeniser::lexer("1 * (( 2 + 3 ) / 4 + 5 )"); let tokens = crate::tokeniser::lexer("1 * (( 2 + 3 ) / 4 + 5 )");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -770,7 +770,7 @@ mod test {
#[test] #[test]
fn test_parse_expression_redundant_braces() { fn test_parse_expression_redundant_braces() {
let tokens = crate::tokeniser::lexer("1 * ((( 2 + 3 )))"); let tokens = crate::tokeniser::lexer("1 * ((( 2 + 3 )))");
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -806,8 +806,7 @@ mod test {
#[test] #[test]
fn test_reverse_polish_notation_simple() { fn test_reverse_polish_notation_simple() {
let result = let result = reverse_polish_notation(&crate::tokeniser::lexer("1 + 2"), &[], &[]).unwrap();
reverse_polish_notation(&crate::tokeniser::lexer("1 + 2"), &vec![], &vec![]).unwrap();
assert_eq!( assert_eq!(
result, result,
vec![ vec![
@ -836,8 +835,7 @@ mod test {
#[test] #[test]
fn test_reverse_polish_notation_complex() { fn test_reverse_polish_notation_complex() {
let result = let result =
reverse_polish_notation(&crate::tokeniser::lexer("1 + 2 * 3"), &vec![], &vec![]) reverse_polish_notation(&crate::tokeniser::lexer("1 + 2 * 3"), &[], &[]).unwrap();
.unwrap();
assert_eq!( assert_eq!(
result, result,
vec![ vec![
@ -878,8 +876,7 @@ mod test {
#[test] #[test]
fn test_reverse_polish_notation_complex_with_parentheses() { fn test_reverse_polish_notation_complex_with_parentheses() {
let result = let result =
reverse_polish_notation(&crate::tokeniser::lexer("1 * ( 2 + 3 )"), &vec![], &vec![]) reverse_polish_notation(&crate::tokeniser::lexer("1 * ( 2 + 3 )"), &[], &[]).unwrap();
.unwrap();
assert_eq!( assert_eq!(
result, result,
vec![ vec![
@ -933,7 +930,7 @@ mod test {
fn test_parse_expression_redundant_braces_around_literal() { fn test_parse_expression_redundant_braces_around_literal() {
let code = "2 + (((3)))"; let code = "2 + (((3)))";
let tokens = crate::tokeniser::lexer(code); let tokens = crate::tokeniser::lexer(code);
let result = parse_expression(tokens).unwrap(); let result = parse_expression(&tokens).unwrap();
assert_eq!( assert_eq!(
result, result,
BinaryExpression { BinaryExpression {
@ -1018,7 +1015,7 @@ mod test {
})), })),
})), })),
}; };
let output = build_tree(input_tokens, vec![]).unwrap(); let output = build_tree(&input_tokens, vec![]).unwrap();
assert_eq!(output, expected_output); assert_eq!(output, expected_output);
} }
} }
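
The recursive parser functions above (reverse_polish_notation, build_tree) now take &[Token] and recurse on &tokens[1..], so each step re-borrows the original token buffer instead of allocating a fresh Vec with .to_vec(). A small self-contained sketch of that recursion shape, using a made-up Token type:

// Made-up token type, just to give the slice something to hold.
#[derive(Debug, PartialEq)]
struct Token(char);

// Recursing on a sub-slice: `rest` points into the same backing buffer as
// `tokens`; no per-call allocation, unlike `tokens[1..].to_vec()`.
fn count_tokens(tokens: &[Token]) -> usize {
    match tokens.split_first() {
        None => 0,
        Some((_first, rest)) => 1 + count_tokens(rest),
    }
}

fn main() {
    let tokens = vec![Token('1'), Token('+'), Token('2')];
    assert_eq!(count_tokens(&tokens), 3);
}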

File 3 of 4

@ -64,7 +64,7 @@ pub fn is_not_code_token(token: &Token) -> bool {
|| token.token_type == TokenType::BlockComment || token.token_type == TokenType::BlockComment
} }
fn find_end_of_non_code_node(tokens: &Vec<Token>, index: usize) -> usize { fn find_end_of_non_code_node(tokens: &[Token], index: usize) -> usize {
if index == tokens.len() { if index == tokens.len() {
return index; return index;
} }
@ -75,7 +75,7 @@ fn find_end_of_non_code_node(tokens: &Vec<Token>, index: usize) -> usize {
index index
} }
fn make_none_code_node(tokens: &Vec<Token>, index: usize) -> (Option<NoneCodeNode>, usize) { fn make_none_code_node(tokens: &[Token], index: usize) -> (Option<NoneCodeNode>, usize) {
let current_token = &tokens[index]; let current_token = &tokens[index];
let end_index = if index == tokens.len() { let end_index = if index == tokens.len() {
index index
@ -110,7 +110,7 @@ struct TokenReturnWithNonCode {
} }
fn next_meaningful_token( fn next_meaningful_token(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
offset: Option<usize>, offset: Option<usize>,
) -> TokenReturnWithNonCode { ) -> TokenReturnWithNonCode {
@ -206,10 +206,7 @@ fn is_call_expression(tokens: &[Token], index: usize) -> Result<Option<usize>, K
Ok(None) Ok(None)
} }
fn find_next_declaration_keyword( fn find_next_declaration_keyword(tokens: &[Token], index: usize) -> Result<TokenReturn, KclError> {
tokens: &Vec<Token>,
index: usize,
) -> Result<TokenReturn, KclError> {
if index >= tokens.len() - 1 { if index >= tokens.len() - 1 {
return Ok(TokenReturn { return Ok(TokenReturn {
token: None, token: None,
@ -250,7 +247,7 @@ fn find_next_declaration_keyword(
} }
fn has_pipe_operator( fn has_pipe_operator(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
_limit_index: Option<usize>, _limit_index: Option<usize>,
) -> Result<TokenReturnWithNonCode, KclError> { ) -> Result<TokenReturnWithNonCode, KclError> {
@ -327,7 +324,7 @@ fn has_pipe_operator(
} }
fn collect_object_keys( fn collect_object_keys(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
_previous_keys: Option<Vec<ObjectKeyInfo>>, _previous_keys: Option<Vec<ObjectKeyInfo>>,
) -> Result<Vec<ObjectKeyInfo>, KclError> { ) -> Result<Vec<ObjectKeyInfo>, KclError> {
@ -404,7 +401,7 @@ pub struct MemberExpressionReturn {
} }
fn make_member_expression( fn make_member_expression(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<MemberExpressionReturn, KclError> { ) -> Result<MemberExpressionReturn, KclError> {
let current_token = tokens[index].clone(); let current_token = tokens[index].clone();
@ -435,22 +432,23 @@ fn make_member_expression(
}) })
} }
fn find_end_of_binary_expression(tokens: &Vec<Token>, index: usize) -> Result<usize, KclError> { fn find_end_of_binary_expression(tokens: &[Token], index: usize) -> Result<usize, KclError> {
let current_token = tokens[index].clone(); let current_token = tokens[index].clone();
if current_token.token_type == TokenType::Brace && current_token.value == "(" { if current_token.token_type == TokenType::Brace && current_token.value == "(" {
let closing_parenthesis = find_closing_brace(tokens, index, 0, "")?; let closing_parenthesis = find_closing_brace(tokens, index, 0, "")?;
let maybe_another_operator = next_meaningful_token(tokens, closing_parenthesis, None); let maybe_another_operator = next_meaningful_token(tokens, closing_parenthesis, None);
if let Some(maybe_another_operator_token) = maybe_another_operator.token { return if let Some(maybe_another_operator_token) = maybe_another_operator.token {
if maybe_another_operator_token.token_type != TokenType::Operator if maybe_another_operator_token.token_type != TokenType::Operator
|| maybe_another_operator_token.value == "|>" || maybe_another_operator_token.value == "|>"
{ {
return Ok(closing_parenthesis); Ok(closing_parenthesis)
} else {
let next_right = next_meaningful_token(tokens, maybe_another_operator.index, None);
find_end_of_binary_expression(tokens, next_right.index)
} }
let next_right = next_meaningful_token(tokens, maybe_another_operator.index, None);
return find_end_of_binary_expression(tokens, next_right.index);
} else { } else {
return Ok(closing_parenthesis); Ok(closing_parenthesis)
} };
} }
if current_token.token_type == TokenType::Word if current_token.token_type == TokenType::Word
&& tokens.get(index + 1).unwrap().token_type == TokenType::Brace && tokens.get(index + 1).unwrap().token_type == TokenType::Brace
@ -458,17 +456,18 @@ fn find_end_of_binary_expression(tokens: &Vec<Token>, index: usize) -> Result<us
{ {
let closing_parenthesis = find_closing_brace(tokens, index + 1, 0, "")?; let closing_parenthesis = find_closing_brace(tokens, index + 1, 0, "")?;
let maybe_another_operator = next_meaningful_token(tokens, closing_parenthesis, None); let maybe_another_operator = next_meaningful_token(tokens, closing_parenthesis, None);
if let Some(maybe_another_operator_token) = maybe_another_operator.token { return if let Some(maybe_another_operator_token) = maybe_another_operator.token {
if maybe_another_operator_token.token_type != TokenType::Operator if maybe_another_operator_token.token_type != TokenType::Operator
|| maybe_another_operator_token.value == "|>" || maybe_another_operator_token.value == "|>"
{ {
return Ok(closing_parenthesis); Ok(closing_parenthesis)
} else {
let next_right = next_meaningful_token(tokens, maybe_another_operator.index, None);
find_end_of_binary_expression(tokens, next_right.index)
} }
let next_right = next_meaningful_token(tokens, maybe_another_operator.index, None);
return find_end_of_binary_expression(tokens, next_right.index);
} else { } else {
return Ok(closing_parenthesis); Ok(closing_parenthesis)
} };
} }
let maybe_operator = next_meaningful_token(tokens, index, None); let maybe_operator = next_meaningful_token(tokens, index, None);
if let Some(maybe_operator_token) = maybe_operator.token { if let Some(maybe_operator_token) = maybe_operator.token {
@ -489,7 +488,7 @@ struct ValueReturn {
last_index: usize, last_index: usize,
} }
fn make_value(tokens: &Vec<Token>, index: usize) -> Result<ValueReturn, KclError> { fn make_value(tokens: &[Token], index: usize) -> Result<ValueReturn, KclError> {
let current_token = &tokens[index]; let current_token = &tokens[index];
let next = next_meaningful_token(tokens, index, None); let next = next_meaningful_token(tokens, index, None);
if let Some(next_token) = &next.token { if let Some(next_token) = &next.token {
@ -584,25 +583,27 @@ fn make_value(tokens: &Vec<Token>, index: usize) -> Result<ValueReturn, KclError
if current_token.token_type == TokenType::Brace && current_token.value == "(" { if current_token.token_type == TokenType::Brace && current_token.value == "(" {
let closing_brace_index = find_closing_brace(tokens, index, 0, "")?; let closing_brace_index = find_closing_brace(tokens, index, 0, "")?;
if let Some(arrow_token) = next_meaningful_token(tokens, closing_brace_index, None).token { return if let Some(arrow_token) =
next_meaningful_token(tokens, closing_brace_index, None).token
{
if arrow_token.token_type == TokenType::Operator && arrow_token.value == "=>" { if arrow_token.token_type == TokenType::Operator && arrow_token.value == "=>" {
let function_expression = make_function_expression(tokens, index)?; let function_expression = make_function_expression(tokens, index)?;
return Ok(ValueReturn { Ok(ValueReturn {
value: Value::FunctionExpression(Box::new(function_expression.expression)), value: Value::FunctionExpression(Box::new(function_expression.expression)),
last_index: function_expression.last_index, last_index: function_expression.last_index,
}); })
} else { } else {
return Err(KclError::Unimplemented(KclErrorDetails { Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![[current_token.start as i32, current_token.end as i32]], source_ranges: vec![[current_token.start as i32, current_token.end as i32]],
message: "expression with braces".to_string(), message: "expression with braces".to_string(),
})); }))
} }
} else { } else {
return Err(KclError::Unimplemented(KclErrorDetails { Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![[current_token.start as i32, current_token.end as i32]], source_ranges: vec![[current_token.start as i32, current_token.end as i32]],
message: "expression with braces".to_string(), message: "expression with braces".to_string(),
})); }))
} };
} }
if current_token.token_type == TokenType::Operator && current_token.value == "-" { if current_token.token_type == TokenType::Operator && current_token.value == "-" {
@ -625,7 +626,7 @@ struct ArrayElementsReturn {
} }
fn make_array_elements( fn make_array_elements(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
previous_elements: Vec<Value>, previous_elements: Vec<Value>,
) -> Result<ArrayElementsReturn, KclError> { ) -> Result<ArrayElementsReturn, KclError> {
@ -676,7 +677,7 @@ struct ArrayReturn {
last_index: usize, last_index: usize,
} }
fn make_array_expression(tokens: &Vec<Token>, index: usize) -> Result<ArrayReturn, KclError> { fn make_array_expression(tokens: &[Token], index: usize) -> Result<ArrayReturn, KclError> {
let opening_brace_token = &tokens[index]; let opening_brace_token = &tokens[index];
let first_element_token = next_meaningful_token(tokens, index, None); let first_element_token = next_meaningful_token(tokens, index, None);
let array_elements = make_array_elements(tokens, first_element_token.index, Vec::new())?; let array_elements = make_array_elements(tokens, first_element_token.index, Vec::new())?;
@ -697,7 +698,7 @@ struct PipeBodyReturn {
} }
fn make_pipe_body( fn make_pipe_body(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
previous_values: Vec<Value>, previous_values: Vec<Value>,
previous_non_code_meta: Option<NoneCodeMeta>, previous_non_code_meta: Option<NoneCodeMeta>,
@ -761,11 +762,11 @@ struct BinaryExpressionReturn {
} }
fn make_binary_expression( fn make_binary_expression(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<BinaryExpressionReturn, KclError> { ) -> Result<BinaryExpressionReturn, KclError> {
let end_index = find_end_of_binary_expression(tokens, index)?; let end_index = find_end_of_binary_expression(tokens, index)?;
let expression = parse_expression(tokens[index..end_index + 1].to_vec())?; let expression = parse_expression(&tokens[index..end_index + 1])?;
Ok(BinaryExpressionReturn { Ok(BinaryExpressionReturn {
expression, expression,
last_index: end_index, last_index: end_index,
@ -778,7 +779,7 @@ struct ArgumentsReturn {
} }
fn make_arguments( fn make_arguments(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
previous_args: Vec<Value>, previous_args: Vec<Value>,
) -> Result<ArgumentsReturn, KclError> { ) -> Result<ArgumentsReturn, KclError> {
@ -873,7 +874,7 @@ fn make_arguments(
&& next_brace_or_comma_token.value == "(" && next_brace_or_comma_token.value == "("
{ {
let closing_brace = find_closing_brace(tokens, next_brace_or_comma.index, 0, "")?; let closing_brace = find_closing_brace(tokens, next_brace_or_comma.index, 0, "")?;
if let Some(token_after_closing_brace) = return if let Some(token_after_closing_brace) =
next_meaningful_token(tokens, closing_brace, None).token next_meaningful_token(tokens, closing_brace, None).token
{ {
if token_after_closing_brace.token_type == TokenType::Operator if token_after_closing_brace.token_type == TokenType::Operator
@ -887,28 +888,25 @@ fn make_arguments(
_previous_args.push(Value::BinaryExpression(Box::new( _previous_args.push(Value::BinaryExpression(Box::new(
binary_expression.expression, binary_expression.expression,
))); )));
return make_arguments( make_arguments(tokens, next_comma_or_brace_token_index, _previous_args)
tokens, } else {
next_comma_or_brace_token_index, let call_expression = make_call_expression(tokens, argument_token.index)?;
_previous_args, let next_comma_or_brace_token_index =
); next_meaningful_token(tokens, call_expression.last_index, None).index;
let mut _previous_args = previous_args;
_previous_args
.push(Value::CallExpression(Box::new(call_expression.expression)));
make_arguments(tokens, next_comma_or_brace_token_index, _previous_args)
} }
let call_expression = make_call_expression(tokens, argument_token.index)?;
let next_comma_or_brace_token_index =
next_meaningful_token(tokens, call_expression.last_index, None).index;
let mut _previous_args = previous_args;
_previous_args
.push(Value::CallExpression(Box::new(call_expression.expression)));
return make_arguments(tokens, next_comma_or_brace_token_index, _previous_args);
} else { } else {
return Err(KclError::Unimplemented(KclErrorDetails { Err(KclError::Unimplemented(KclErrorDetails {
source_ranges: vec![[ source_ranges: vec![[
argument_token_token.start as i32, argument_token_token.start as i32,
argument_token_token.end as i32, argument_token_token.end as i32,
]], ]],
message: format!("Unexpected token {} ", argument_token_token.value), message: format!("Unexpected token {} ", argument_token_token.value),
})); }))
} };
} }
if argument_token_token.token_type == TokenType::Word { if argument_token_token.token_type == TokenType::Word {
@ -963,7 +961,7 @@ pub struct CallExpressionResult {
} }
pub fn make_call_expression( pub fn make_call_expression(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<CallExpressionResult, KclError> { ) -> Result<CallExpressionResult, KclError> {
let current_token = tokens[index].clone(); let current_token = tokens[index].clone();
@ -988,10 +986,7 @@ struct PipeExpressionResult {
last_index: usize, last_index: usize,
} }
fn make_pipe_expression( fn make_pipe_expression(tokens: &[Token], index: usize) -> Result<PipeExpressionResult, KclError> {
tokens: &Vec<Token>,
index: usize,
) -> Result<PipeExpressionResult, KclError> {
let current_token = tokens[index].clone(); let current_token = tokens[index].clone();
let pipe_body_result = make_pipe_body(tokens, index, vec![], None)?; let pipe_body_result = make_pipe_body(tokens, index, vec![], None)?;
let end_token = tokens[pipe_body_result.last_index].clone(); let end_token = tokens[pipe_body_result.last_index].clone();
@ -1012,7 +1007,7 @@ struct VariableDeclaratorsReturn {
} }
fn make_variable_declarators( fn make_variable_declarators(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
previous_declarators: Vec<VariableDeclarator>, previous_declarators: Vec<VariableDeclarator>,
) -> Result<VariableDeclaratorsReturn, KclError> { ) -> Result<VariableDeclaratorsReturn, KclError> {
@ -1062,7 +1057,7 @@ struct VariableDeclarationResult {
} }
fn make_variable_declaration( fn make_variable_declaration(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<VariableDeclarationResult, KclError> { ) -> Result<VariableDeclarationResult, KclError> {
let current_token = tokens[index].clone(); let current_token = tokens[index].clone();
@ -1094,7 +1089,7 @@ pub struct ParamsResult {
} }
fn make_params( fn make_params(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
previous_params: Vec<Identifier>, previous_params: Vec<Identifier>,
) -> Result<ParamsResult, KclError> { ) -> Result<ParamsResult, KclError> {
@ -1133,7 +1128,7 @@ struct UnaryExpressionResult {
} }
fn make_unary_expression( fn make_unary_expression(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<UnaryExpressionResult, KclError> { ) -> Result<UnaryExpressionResult, KclError> {
let current_token = &tokens[index]; let current_token = &tokens[index];
@ -1180,7 +1175,7 @@ struct ExpressionStatementResult {
} }
fn make_expression_statement( fn make_expression_statement(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<ExpressionStatementResult, KclError> { ) -> Result<ExpressionStatementResult, KclError> {
let current_token = &tokens[index]; let current_token = &tokens[index];
@ -1222,7 +1217,7 @@ struct ObjectPropertiesResult {
} }
fn make_object_properties( fn make_object_properties(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
previous_properties: Vec<ObjectProperty>, previous_properties: Vec<ObjectProperty>,
) -> Result<ObjectPropertiesResult, KclError> { ) -> Result<ObjectPropertiesResult, KclError> {
@ -1286,7 +1281,7 @@ struct ObjectExpressionResult {
} }
fn make_object_expression( fn make_object_expression(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<ObjectExpressionResult, KclError> { ) -> Result<ObjectExpressionResult, KclError> {
let opening_brace_token = &tokens[index]; let opening_brace_token = &tokens[index];
@ -1308,7 +1303,7 @@ struct ReturnStatementResult {
} }
fn make_return_statement( fn make_return_statement(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<ReturnStatementResult, KclError> { ) -> Result<ReturnStatementResult, KclError> {
let current_token = &tokens[index]; let current_token = &tokens[index];
@ -1333,7 +1328,7 @@ struct BodyResult {
} }
fn make_body( fn make_body(
tokens: &Vec<Token>, tokens: &[Token],
token_index: usize, token_index: usize,
previous_body: Vec<BodyItem>, previous_body: Vec<BodyItem>,
previous_non_code_meta: NoneCodeMeta, previous_non_code_meta: NoneCodeMeta,
@ -1485,10 +1480,7 @@ struct BlockStatementResult {
last_index: usize, last_index: usize,
} }
fn make_block_statement( fn make_block_statement(tokens: &[Token], index: usize) -> Result<BlockStatementResult, KclError> {
tokens: &Vec<Token>,
index: usize,
) -> Result<BlockStatementResult, KclError> {
let opening_curly = tokens[index].clone(); let opening_curly = tokens[index].clone();
let next_token = &tokens[index + 1]; let next_token = &tokens[index + 1];
let next_token_index = index + 1; let next_token_index = index + 1;
@ -1529,7 +1521,7 @@ struct FunctionExpressionResult {
} }
fn make_function_expression( fn make_function_expression(
tokens: &Vec<Token>, tokens: &[Token],
index: usize, index: usize,
) -> Result<FunctionExpressionResult, KclError> { ) -> Result<FunctionExpressionResult, KclError> {
let current_token = &tokens[index]; let current_token = &tokens[index];
@ -1550,7 +1542,7 @@ fn make_function_expression(
}) })
} }
pub fn abstract_syntax_tree(tokens: &Vec<Token>) -> Result<Program, KclError> { pub fn abstract_syntax_tree(tokens: &[Token]) -> Result<Program, KclError> {
let body = make_body( let body = make_body(
tokens, tokens,
0, 0,
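
Several of the parser helpers above (find_end_of_binary_expression, make_value, make_arguments) were also restructured so that the whole if-let conditional becomes the returned expression (return if let ... { ... } else { ... };) rather than a series of separate return statements, which makes it harder to fall through a branch by accident. A short sketch of that shape, with a hypothetical function:

// Hypothetical example of the `return if ... else ...;` shape: both arms are
// part of one expression, so each arm yields its value instead of needing
// its own `return`.
fn describe(n: i32) -> String {
    if n < 0 {
        return if n == -1 {
            "minus one".to_string()
        } else {
            format!("negative: {n}")
        };
    }
    format!("non-negative: {n}")
}

fn main() {
    assert_eq!(describe(-1), "minus one");
    assert_eq!(describe(-5), "negative: -5");
    assert_eq!(describe(3), "non-negative: 3");
}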

File 4 of 4

@ -69,9 +69,9 @@ fn recast_binary_part(part: BinaryPart) -> String {
recast_binary_expression(*binary_expression) recast_binary_expression(*binary_expression)
} }
BinaryPart::CallExpression(call_expression) => { BinaryPart::CallExpression(call_expression) => {
recast_call_expression(*call_expression, "".to_string(), false) recast_call_expression(&call_expression, "", false)
} }
_ => "".to_string(), _ => String::new(),
} }
} }
@ -79,36 +79,36 @@ fn recast_value(node: Value, _indentation: String, is_in_pipe_expression: bool)
let indentation = _indentation + if is_in_pipe_expression { " " } else { "" }; let indentation = _indentation + if is_in_pipe_expression { " " } else { "" };
match node { match node {
Value::BinaryExpression(bin_exp) => recast_binary_expression(*bin_exp), Value::BinaryExpression(bin_exp) => recast_binary_expression(*bin_exp),
Value::ArrayExpression(array_exp) => recast_array_expression(*array_exp, indentation), Value::ArrayExpression(array_exp) => recast_array_expression(&array_exp, &indentation),
Value::ObjectExpression(obj_exp) => { Value::ObjectExpression(ref obj_exp) => {
recast_object_expression(*obj_exp, indentation, is_in_pipe_expression) recast_object_expression(obj_exp, &indentation, is_in_pipe_expression)
} }
Value::MemberExpression(mem_exp) => recast_member_expression(*mem_exp), Value::MemberExpression(mem_exp) => recast_member_expression(*mem_exp),
Value::Literal(literal) => recast_literal(*literal), Value::Literal(literal) => recast_literal(*literal),
Value::FunctionExpression(func_exp) => recast_function(*func_exp), Value::FunctionExpression(func_exp) => recast_function(*func_exp),
Value::CallExpression(call_exp) => { Value::CallExpression(call_exp) => {
recast_call_expression(*call_exp, indentation, is_in_pipe_expression) recast_call_expression(&call_exp, &indentation, is_in_pipe_expression)
} }
Value::Identifier(ident) => ident.name, Value::Identifier(ident) => ident.name,
Value::PipeExpression(pipe_exp) => recast_pipe_expression(*pipe_exp), Value::PipeExpression(pipe_exp) => recast_pipe_expression(&pipe_exp),
Value::UnaryExpression(unary_exp) => recast_unary_expression(*unary_exp), Value::UnaryExpression(unary_exp) => recast_unary_expression(*unary_exp),
_ => "".to_string(), _ => String::new(),
} }
} }
fn recast_array_expression(expression: ArrayExpression, indentation: String) -> String { fn recast_array_expression(expression: &ArrayExpression, indentation: &str) -> String {
let flat_recast = format!( let flat_recast = format!(
"[{}]", "[{}]",
expression expression
.elements .elements
.iter() .iter()
.map(|el| recast_value(el.clone(), "".to_string(), false)) .map(|el| recast_value(el.clone(), String::new(), false))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(", ") .join(", ")
); );
let max_array_length = 40; let max_array_length = 40;
if flat_recast.len() > max_array_length { if flat_recast.len() > max_array_length {
let _indentation = indentation.clone() + " "; let _indentation = indentation.to_string() + " ";
format!( format!(
"[\n{}{}\n{}]", "[\n{}{}\n{}]",
_indentation, _indentation,
@ -126,8 +126,8 @@ fn recast_array_expression(expression: ArrayExpression, indentation: String) ->
} }
fn recast_object_expression( fn recast_object_expression(
expression: ObjectExpression, expression: &ObjectExpression,
indentation: String, indentation: &str,
is_in_pipe_expression: bool, is_in_pipe_expression: bool,
) -> String { ) -> String {
let flat_recast = format!( let flat_recast = format!(
@ -139,7 +139,7 @@ fn recast_object_expression(
format!( format!(
"{}: {}", "{}: {}",
prop.key.name, prop.key.name,
recast_value(prop.value.clone(), "".to_string(), false) recast_value(prop.value.clone(), String::new(), false)
) )
}) })
.collect::<Vec<String>>() .collect::<Vec<String>>()
@ -147,7 +147,7 @@ fn recast_object_expression(
); );
let max_array_length = 40; let max_array_length = 40;
if flat_recast.len() > max_array_length { if flat_recast.len() > max_array_length {
let _indentation = indentation + " "; let _indentation = indentation.to_owned() + " ";
format!( format!(
"{{\n{}{}\n{}}}", "{{\n{}{}\n{}}}",
_indentation, _indentation,
@ -175,8 +175,8 @@ fn recast_object_expression(
} }
fn recast_call_expression( fn recast_call_expression(
expression: CallExpression, expression: &CallExpression,
indentation: String, indentation: &str,
is_in_pipe_expression: bool, is_in_pipe_expression: bool,
) -> String { ) -> String {
format!( format!(
@ -185,28 +185,28 @@ fn recast_call_expression(
expression expression
.arguments .arguments
.iter() .iter()
.map(|arg| recast_argument(arg.clone(), indentation.clone(), is_in_pipe_expression)) .map(|arg| recast_argument(arg.clone(), indentation, is_in_pipe_expression))
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(", ") .join(", ")
) )
} }
fn recast_argument(argument: Value, indentation: String, is_in_pipe_expression: bool) -> String { fn recast_argument(argument: Value, indentation: &str, is_in_pipe_expression: bool) -> String {
match argument { match argument {
Value::Literal(literal) => recast_literal(*literal), Value::Literal(literal) => recast_literal(*literal),
Value::Identifier(identifier) => identifier.name, Value::Identifier(identifier) => identifier.name,
Value::BinaryExpression(binary_exp) => recast_binary_expression(*binary_exp), Value::BinaryExpression(binary_exp) => recast_binary_expression(*binary_exp),
Value::ArrayExpression(array_exp) => recast_array_expression(*array_exp, indentation), Value::ArrayExpression(array_exp) => recast_array_expression(&array_exp, indentation),
Value::ObjectExpression(object_exp) => { Value::ObjectExpression(object_exp) => {
recast_object_expression(*object_exp, indentation, is_in_pipe_expression) recast_object_expression(&object_exp, indentation, is_in_pipe_expression)
} }
Value::CallExpression(call_exp) => { Value::CallExpression(call_exp) => {
recast_call_expression(*call_exp, indentation, is_in_pipe_expression) recast_call_expression(&call_exp, indentation, is_in_pipe_expression)
} }
Value::FunctionExpression(function_exp) => recast_function(*function_exp), Value::FunctionExpression(function_exp) => recast_function(*function_exp),
Value::PipeSubstitution(_) => "%".to_string(), Value::PipeSubstitution(_) => "%".to_string(),
Value::UnaryExpression(unary_exp) => recast_unary_expression(*unary_exp), Value::UnaryExpression(unary_exp) => recast_unary_expression(*unary_exp),
_ => "".to_string(), _ => String::new(),
} }
} }
@ -230,7 +230,7 @@ fn recast_member_expression(expression: MemberExpression) -> String {
} }
} }
fn recast_pipe_expression(expression: PipeExpression) -> String { fn recast_pipe_expression(expression: &PipeExpression) -> String {
expression expression
.body .body
.iter() .iter()
@ -243,8 +243,8 @@ fn recast_pipe_expression(expression: PipeExpression) -> String {
if let Some(non_code_meta_value) = non_code_meta.none_code_nodes.get(&index) { if let Some(non_code_meta_value) = non_code_meta.none_code_nodes.get(&index) {
if non_code_meta_value.value != " " { if non_code_meta_value.value != " " {
str += non_code_meta_value.value.as_str(); str += non_code_meta_value.value.as_str();
indentation = "".to_string(); indentation = String::new();
maybe_line_break = "".to_string(); maybe_line_break = String::new();
} }
} }
@ -255,8 +255,7 @@ fn recast_pipe_expression(expression: PipeExpression) -> String {
} }
str str
}) })
.collect::<Vec<String>>() .collect::<String>()
.join("")
} }
fn recast_unary_expression(expression: UnaryExpression) -> String { fn recast_unary_expression(expression: UnaryExpression) -> String {
@ -272,11 +271,11 @@ fn recast_unary_expression(expression: UnaryExpression) -> String {
format!( format!(
"{}{}", "{}{}",
expression.operator, expression.operator,
recast_value(bin_part_val, "".to_string(), false) recast_value(bin_part_val, String::new(), false)
) )
} }
pub fn recast(ast: Program, indentation: String, is_with_block: bool) -> String { pub fn recast(ast: &Program, indentation: &str, is_with_block: bool) -> String {
ast.body ast.body
.iter() .iter()
.map(|statement| match statement.clone() { .map(|statement| match statement.clone() {
@ -286,13 +285,13 @@ pub fn recast(ast: Program, indentation: String, is_with_block: bool) -> String
recast_binary_expression(*binary_expression) recast_binary_expression(*binary_expression)
} }
Value::ArrayExpression(array_expression) => { Value::ArrayExpression(array_expression) => {
recast_array_expression(*array_expression, "".to_string()) recast_array_expression(&array_expression, "")
} }
Value::ObjectExpression(object_expression) => { Value::ObjectExpression(object_expression) => {
recast_object_expression(*object_expression, "".to_string(), false) recast_object_expression(&object_expression, "", false)
} }
Value::CallExpression(call_expression) => { Value::CallExpression(call_expression) => {
recast_call_expression(*call_expression, "".to_string(), false) recast_call_expression(&call_expression, "", false)
} }
_ => "Expression".to_string(), _ => "Expression".to_string(),
} }
@ -305,15 +304,14 @@ pub fn recast(ast: Program, indentation: String, is_with_block: bool) -> String
"{} {} = {}", "{} {} = {}",
variable_declaration.kind, variable_declaration.kind,
declaration.id.name, declaration.id.name,
recast_value(declaration.init.clone(), "".to_string(), false) recast_value(declaration.init.clone(), String::new(), false)
) )
}) })
.collect::<Vec<String>>() .collect::<String>(),
.join(""),
BodyItem::ReturnStatement(return_statement) => { BodyItem::ReturnStatement(return_statement) => {
format!( format!(
"return {}", "return {}",
recast_argument(return_statement.argument, "".to_string(), false) recast_argument(return_statement.argument, "", false)
) )
} }
}) })
@ -338,24 +336,24 @@ pub fn recast(ast: Program, indentation: String, is_with_block: bool) -> String
// indentation of this line will be covered by the previous if we're using a custom whitespace or comment // indentation of this line will be covered by the previous if we're using a custom whitespace or comment
let mut start_string = let mut start_string =
if is_legit_custom_whitespace_or_comment(last_white_space_or_comment) { if is_legit_custom_whitespace_or_comment(last_white_space_or_comment) {
"".to_string() String::new()
} else { } else {
indentation.clone() indentation.to_owned()
}; };
if index == 0 { if index == 0 {
if let Some(start) = ast.non_code_meta.start.clone() { if let Some(start) = ast.non_code_meta.start.clone() {
start_string = start.value; start_string = start.value;
} else { } else {
start_string = indentation.clone(); start_string = indentation.to_owned();
} }
} }
if start_string.ends_with('\n') { if start_string.ends_with('\n') {
start_string += indentation.as_str(); start_string += indentation;
} }
// determine the value of endString // determine the value of endString
let maybe_line_break: String = if index == ast.body.len() - 1 && !is_with_block { let maybe_line_break: String = if index == ast.body.len() - 1 && !is_with_block {
"".to_string() String::new()
} else { } else {
"\n".to_string() "\n".to_string()
}; };
@ -364,21 +362,20 @@ pub fn recast(ast: Program, indentation: String, is_with_block: bool) -> String
Some(custom_white_space_or_comment) => { Some(custom_white_space_or_comment) => {
custom_white_space_or_comment.value.clone() custom_white_space_or_comment.value.clone()
} }
None => "".to_string(), None => String::new(),
}; };
if !is_legit_custom_whitespace_or_comment(custom_white_space_or_comment.clone()) { if !is_legit_custom_whitespace_or_comment(custom_white_space_or_comment.clone()) {
custom_white_space_or_comment = "".to_string(); custom_white_space_or_comment = String::new();
} }
let end_string = if !custom_white_space_or_comment.is_empty() { let end_string = if custom_white_space_or_comment.is_empty() {
custom_white_space_or_comment
} else {
maybe_line_break maybe_line_break
} else {
custom_white_space_or_comment
}; };
format!("{}{}{}", start_string, recast_str, end_string) format!("{}{}{}", start_string, recast_str, end_string)
}) })
.collect::<Vec<String>>() .collect::<String>()
.join("")
} }
pub fn recast_function(expression: FunctionExpression) -> String { pub fn recast_function(expression: FunctionExpression) -> String {
@ -391,13 +388,13 @@ pub fn recast_function(expression: FunctionExpression) -> String {
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(", "), .join(", "),
recast( recast(
Program { &Program {
start: expression.body.start, start: expression.body.start,
end: expression.body.start, end: expression.body.start,
body: expression.body.body, body: expression.body.body,
non_code_meta: expression.body.non_code_meta non_code_meta: expression.body.non_code_meta
}, },
"".to_string(), "",
true true
) )
) )
@ -418,6 +415,6 @@ pub fn recast_js(json_str: &str) -> Result<JsValue, JsError> {
// deserialize the ast from a stringified json // deserialize the ast from a stringified json
let program: Program = serde_json::from_str(json_str).map_err(JsError::from)?; let program: Program = serde_json::from_str(json_str).map_err(JsError::from)?;
let result = recast(program, "".to_string(), false); let result = recast(&program, "", false);
Ok(serde_wasm_bindgen::to_value(&result)?) Ok(serde_wasm_bindgen::to_value(&result)?)
} }
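
In the recasting code above, the remaining allocation savings come from three smaller changes: the recast helpers borrow their inputs (&CallExpression, &str indentation) instead of taking owned values, empty strings are built with String::new(), and iterators of strings are collected with .collect::<String>() instead of .collect::<Vec<String>>().join(""), which skips the intermediate Vec. A minimal sketch of that last pattern, with made-up names:

// Made-up helper: prefix each line and concatenate. Collecting straight into
// a String appends piece by piece; the old style would first build a
// Vec<String> and then join it.
fn indent_lines(lines: &[&str], indentation: &str) -> String {
    lines
        .iter()
        .map(|line| format!("{indentation}{line}\n"))
        .collect::<String>()
}

fn main() {
    let out = indent_lines(&["a", "b"], "  ");
    assert_eq!(out, "  a\n  b\n");
}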