More little bugfixes

NonCodeMeta can have many nodes in its 'start' field, not just 0 or 1

Fix double newline after block comments
This commit is contained in:
Adam Chalmers
2023-10-11 14:12:59 -05:00
parent c3a61f368e
commit e2ad24360a
8 changed files with 96 additions and 43 deletions

View File

@ -29,7 +29,7 @@ class KclManager {
end: 0, end: 0,
nonCodeMeta: { nonCodeMeta: {
nonCodeNodes: {}, nonCodeNodes: {},
start: null, start: [],
}, },
} }
private _programMemory: ProgramMemory = { private _programMemory: ProgramMemory = {
@ -220,7 +220,7 @@ class KclManager {
end: 0, end: 0,
nonCodeMeta: { nonCodeMeta: {
nonCodeNodes: {}, nonCodeNodes: {},
start: null, start: [],
}, },
} }
this._programMemory = { this._programMemory = {

View File

@ -104,7 +104,7 @@ describe('Testing addSketchTo', () => {
body: [], body: [],
start: 0, start: 0,
end: 0, end: 0,
nonCodeMeta: { nonCodeNodes: {}, start: null }, nonCodeMeta: { nonCodeNodes: {}, start: [] },
}, },
'yz' 'yz'
) )

View File

@ -540,7 +540,7 @@ export function createPipeExpression(
start: 0, start: 0,
end: 0, end: 0,
body, body,
nonCodeMeta: { nonCodeNodes: {}, start: null }, nonCodeMeta: { nonCodeNodes: {}, start: [] },
} }
} }

View File

@ -273,6 +273,7 @@ const mySk1 = startSketchAt([0, 0])
const { ast } = code2ast(code) const { ast } = code2ast(code)
const recasted = recast(ast) const recasted = recast(ast)
expect(recasted).toBe(`/* comment at start */ expect(recasted).toBe(`/* comment at start */
const mySk1 = startSketchAt([0, 0]) const mySk1 = startSketchAt([0, 0])
|> lineTo([1, 1], %) |> lineTo([1, 1], %)
// comment here // comment here

View File

@ -257,7 +257,7 @@ export async function executeCode({
body: [], body: [],
nonCodeMeta: { nonCodeMeta: {
nonCodeNodes: {}, nonCodeNodes: {},
start: null, start: [],
}, },
}, },
} }

View File

@ -63,10 +63,14 @@ impl Program {
.fold(String::new(), |mut output, (index, recast_str)| { .fold(String::new(), |mut output, (index, recast_str)| {
let start_string = if index == 0 { let start_string = if index == 0 {
// We need to indent. // We need to indent.
if let Some(start) = self.non_code_meta.start.clone() { if self.non_code_meta.start.is_empty() {
start.format(&indentation)
} else {
indentation.to_string() indentation.to_string()
} else {
self.non_code_meta
.start
.iter()
.map(|start| start.format(&indentation))
.collect()
} }
} else { } else {
// Do nothing, we already applied the indentation elsewhere. // Do nothing, we already applied the indentation elsewhere.
@ -730,7 +734,7 @@ impl NonCodeNode {
NonCodeValue::BlockComment { value, style } => { NonCodeValue::BlockComment { value, style } => {
let add_start_new_line = if self.start == 0 { "" } else { "\n" }; let add_start_new_line = if self.start == 0 { "" } else { "\n" };
match style { match style {
CommentStyle::Block => format!("{}{}/* {} */\n", add_start_new_line, indentation, value), CommentStyle::Block => format!("{}{}/* {} */", add_start_new_line, indentation, value),
CommentStyle::Line => format!("{}{}// {}\n", add_start_new_line, indentation, value), CommentStyle::Line => format!("{}{}// {}\n", add_start_new_line, indentation, value),
} }
} }
@ -798,7 +802,7 @@ pub enum NonCodeValue {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct NonCodeMeta { pub struct NonCodeMeta {
pub non_code_nodes: HashMap<usize, Vec<NonCodeNode>>, pub non_code_nodes: HashMap<usize, Vec<NonCodeNode>>,
pub start: Option<NonCodeNode>, pub start: Vec<NonCodeNode>,
} }
// implement Deserialize manually because we to force the keys of non_code_nodes to be usize // implement Deserialize manually because we to force the keys of non_code_nodes to be usize
@ -812,7 +816,7 @@ impl<'de> Deserialize<'de> for NonCodeMeta {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct NonCodeMetaHelper { struct NonCodeMetaHelper {
non_code_nodes: HashMap<String, Vec<NonCodeNode>>, non_code_nodes: HashMap<String, Vec<NonCodeNode>>,
start: Option<NonCodeNode>, start: Vec<NonCodeNode>,
} }
let helper = NonCodeMetaHelper::deserialize(deserializer)?; let helper = NonCodeMetaHelper::deserialize(deserializer)?;
@ -2908,6 +2912,25 @@ show(part001)"#;
// this is also a comment // this is also a comment
return things return things
} }
"#
);
}
#[test]
fn test_recast_comment_at_start() {
let test_program = r#"
/* comment at start */
const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program);
let parser = crate::parser::Parser::new(tokens);
let program = parser.ast().unwrap();
let recasted = program.recast(&Default::default(), 0);
assert_eq!(
recasted,
r#"/* comment at start */
const mySk1 = startSketchAt([0, 0])
"# "#
); );
} }

View File

@ -181,10 +181,33 @@ impl Parser {
Ok(token) Ok(token)
} }
/// Use the new Winnow parser.
pub fn ast(&self) -> Result<Program, KclError> { pub fn ast(&self) -> Result<Program, KclError> {
parser_impl::run_parser(&mut self.tokens.as_slice()) parser_impl::run_parser(&mut self.tokens.as_slice())
} }
/// Use the old handwritten recursive parser.
pub fn ast_old(&self) -> Result<Program, KclError> {
let body = self.make_body(
0,
vec![],
NonCodeMeta {
non_code_nodes: HashMap::new(),
start: Vec::new(),
},
)?;
let end = match self.get_token(body.last_index) {
Ok(token) => token.end,
Err(_) => self.tokens[self.tokens.len() - 1].end,
};
Ok(Program {
start: 0,
end,
body: body.body,
non_code_meta: body.non_code_meta,
})
}
fn make_identifier(&self, index: usize) -> Result<Identifier, KclError> { fn make_identifier(&self, index: usize) -> Result<Identifier, KclError> {
let current_token = self.get_token(index)?; let current_token = self.get_token(index)?;
Ok(Identifier { Ok(Identifier {
@ -1044,7 +1067,7 @@ impl Parser {
let non_code_meta = match previous_non_code_meta { let non_code_meta = match previous_non_code_meta {
Some(meta) => meta, Some(meta) => meta,
None => NonCodeMeta { None => NonCodeMeta {
start: None, start: Vec::new(),
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
}, },
}; };
@ -1642,7 +1665,9 @@ impl Parser {
let next_token = self.next_meaningful_token(token_index, Some(0))?; let next_token = self.next_meaningful_token(token_index, Some(0))?;
if let Some(node) = &next_token.non_code_node { if let Some(node) = &next_token.non_code_node {
if previous_body.is_empty() { if previous_body.is_empty() {
non_code_meta.start = next_token.non_code_node; if let Some(next) = next_token.non_code_node {
non_code_meta.start.push(next);
}
} else { } else {
non_code_meta.insert(previous_body.len(), node.clone()); non_code_meta.insert(previous_body.len(), node.clone());
} }
@ -1760,7 +1785,7 @@ impl Parser {
last_index: next_token_index, last_index: next_token_index,
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
} }
} else { } else {
@ -1769,7 +1794,7 @@ impl Parser {
vec![], vec![],
NonCodeMeta { NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
)? )?
}; };
@ -2641,7 +2666,7 @@ show(mySk1)"#;
vec![], vec![],
NonCodeMeta { NonCodeMeta {
non_code_nodes: HashMap::new(), non_code_nodes: HashMap::new(),
start: None, start: Vec::new(),
}, },
) )
.unwrap(); .unwrap();
@ -2677,10 +2702,7 @@ show(mySk1)"#;
})), })),
})), })),
})], })],
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta::default(),
non_code_nodes: Default::default(),
start: None,
},
}; };
assert_eq!(result, expected_result); assert_eq!(result, expected_result);
@ -3022,10 +3044,7 @@ e
}], }],
kind: VariableKind::Const, kind: VariableKind::Const,
})], })],
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta::default(),
non_code_nodes: Default::default(),
start: None,
},
}; };
assert_eq!(result, expected_result); assert_eq!(result, expected_result);

View File

@ -760,7 +760,7 @@ pub fn function_body(i: TokenSlice) -> PResult<Program> {
} }
end = nc.end; end = nc.end;
if body.is_empty() { if body.is_empty() {
non_code_meta.start = Some(nc) non_code_meta.start.push(nc);
} else { } else {
non_code_meta.insert(body.len() - 1, nc); non_code_meta.insert(body.len() - 1, nc);
} }
@ -1346,7 +1346,7 @@ mod tests {
let mut slice = tokens.as_slice(); let mut slice = tokens.as_slice();
let expr = function_expression.parse_next(&mut slice).unwrap(); let expr = function_expression.parse_next(&mut slice).unwrap();
assert_eq!(expr.params, vec![]); assert_eq!(expr.params, vec![]);
let comment_start = expr.body.non_code_meta.start.unwrap(); let comment_start = expr.body.non_code_meta.start.first().unwrap();
let comment0 = &expr.body.non_code_meta.non_code_nodes.get(&0).unwrap()[0]; let comment0 = &expr.body.non_code_meta.non_code_nodes.get(&0).unwrap()[0];
let comment1 = &expr.body.non_code_meta.non_code_nodes.get(&1).unwrap()[0]; let comment1 = &expr.body.non_code_meta.non_code_nodes.get(&1).unwrap()[0];
assert_eq!(comment_start.value(), "comment 0"); assert_eq!(comment_start.value(), "comment 0");
@ -1367,6 +1367,28 @@ comment */
assert_eq!(comment0.value(), "block\ncomment"); assert_eq!(comment0.value(), "block\ncomment");
} }
#[test]
fn test_comment_at_start_of_program() {
let test_program = r#"
/* comment at start */
const mySk1 = startSketchAt([0, 0])"#;
let tokens = crate::token::lexer(test_program);
let program = program.parse(&tokens).unwrap();
let mut starting_comments = program.non_code_meta.start;
assert_eq!(starting_comments.len(), 2);
let start0 = starting_comments.remove(0);
let start1 = starting_comments.remove(0);
assert_eq!(
start0.value,
NonCodeValue::BlockComment {
value: "comment at start".to_owned(),
style: CommentStyle::Block,
}
);
assert_eq!(start1.value, NonCodeValue::NewLine);
}
#[test] #[test]
fn test_whitespace_in_function() { fn test_whitespace_in_function() {
let test_program = r#"() => { let test_program = r#"() => {
@ -1408,11 +1430,11 @@ comment */
})], })],
non_code_meta: NonCodeMeta { non_code_meta: NonCodeMeta {
non_code_nodes: Default::default(), non_code_nodes: Default::default(),
start: Some(NonCodeNode { start: vec![NonCodeNode {
start: 7, start: 7,
end: 25, end: 25,
value: NonCodeValue::NewLine value: NonCodeValue::NewLine
}) }],
}, },
} }
} }
@ -1456,14 +1478,14 @@ comment */
let tokens = crate::token::lexer(test_program); let tokens = crate::token::lexer(test_program);
let Program { non_code_meta, .. } = function_body.parse(&tokens).unwrap(); let Program { non_code_meta, .. } = function_body.parse(&tokens).unwrap();
assert_eq!( assert_eq!(
Some(NonCodeNode { vec![NonCodeNode {
start: 0, start: 0,
end: 20, end: 20,
value: NonCodeValue::BlockComment { value: NonCodeValue::BlockComment {
value: "this is a comment".to_owned(), value: "this is a comment".to_owned(),
style: CommentStyle::Line, style: CommentStyle::Line,
}, },
}), }],
non_code_meta.start, non_code_meta.start,
); );
assert_eq!( assert_eq!(
@ -1577,18 +1599,6 @@ comment */
fn check_parsers_work_the_same() { fn check_parsers_work_the_same() {
for (i, test_program) in [ for (i, test_program) in [
"let x = 1 * (3 - 4)", "let x = 1 * (3 - 4)",
r#"
// this is a comment
const yo = { a: { b: { c: '123' } } }
const key = 'c'
const things = "things"
// this is also a comment"#,
r#"const three = 3
const yo = 3
"#,
r#"const x = 1 // this is an inline comment"#, r#"const x = 1 // this is an inline comment"#,
r#"fn x = () => { r#"fn x = () => {
return sg return sg
@ -1658,7 +1668,7 @@ const yo = 3
// Run the original parser // Run the original parser
let tokens = crate::token::lexer(test_program); let tokens = crate::token::lexer(test_program);
let expected = crate::parser::Parser::new(tokens.clone()) let expected = crate::parser::Parser::new(tokens.clone())
.ast() .ast_old()
.expect("Old parser failed"); .expect("Old parser failed");
// Run the second parser, check it matches the first parser. // Run the second parser, check it matches the first parser.