diff --git a/.codespellrc b/.codespellrc
index f14154708..1fdfd1680 100644
--- a/.codespellrc
+++ b/.codespellrc
@@ -1,3 +1,3 @@
 [codespell]
-ignore-words-list: crate,everytime,inout,co-ordinate,ot,nwo,absolutey,atleast,ue,afterall
+ignore-words-list: crate,everytime,inout,co-ordinate,ot,nwo,absolutey,atleast,ue,afterall,ket
 skip: **/target,node_modules,build,**/Cargo.lock,./docs/kcl/*.md,.yarn.lock,**/yarn.lock,./openapi/*.json,./src/lib/machine-api.d.ts
diff --git a/src/wasm-lib/kcl/src/parsing/parser.rs b/src/wasm-lib/kcl/src/parsing/parser.rs
index 70ea70903..2cb844f14 100644
--- a/src/wasm-lib/kcl/src/parsing/parser.rs
+++ b/src/wasm-lib/kcl/src/parsing/parser.rs
@@ -4,7 +4,7 @@ use std::{cell::RefCell, collections::HashMap, str::FromStr};
 
 use winnow::{
-    combinator::{alt, delimited, opt, peek, preceded, repeat, separated, separated_pair, terminated},
+    combinator::{alt, opt, peek, preceded, repeat, separated, separated_pair, terminated},
     dispatch,
     error::{ErrMode, StrContext, StrContextValue},
     prelude::*,
@@ -1662,12 +1662,24 @@ fn label(i: &mut TokenSlice) -> PResult<Node<Identifier>> {
 }
 
 fn unnecessarily_bracketed(i: &mut TokenSlice) -> PResult<Expr> {
-    delimited(
+    let (bra, result, ket) = (
         terminated(open_paren, opt(whitespace)),
         expression,
         preceded(opt(whitespace), close_paren),
     )
-    .parse_next(i)
+    .parse_next(i)?;
+
+    let expr_range: SourceRange = (&result).into();
+
+    ParseContext::warn(CompilationError::with_suggestion(
+        SourceRange::new(bra.start, ket.end, result.module_id()),
+        None,
+        "Unnecessary parentheses around sub-expression",
+        Some(("Remove parentheses", i.text(expr_range))),
+        Tag::Unnecessary,
+    ));
+
+    Ok(result)
 }
 
 fn expr_allowed_in_pipe_expr(i: &mut TokenSlice) -> PResult<Expr> {
@@ -4239,6 +4251,15 @@ var baz = 2
 "#
         );
     }
+
+    #[test]
+    fn warn_unneccessary_parens() {
+        let some_program_string = r#"foo((a + b))"#;
+        let (_, errs) = assert_no_err(some_program_string);
+        assert_eq!(errs.len(), 1);
+        let replaced = errs[0].apply_suggestion(some_program_string).unwrap();
+        assert_eq!(replaced, r#"foo(a + b)"#);
+    }
 }
 
 #[cfg(test)]
diff --git a/src/wasm-lib/kcl/src/parsing/token/mod.rs b/src/wasm-lib/kcl/src/parsing/token/mod.rs
index 59af3b30f..0598c6540 100644
--- a/src/wasm-lib/kcl/src/parsing/token/mod.rs
+++ b/src/wasm-lib/kcl/src/parsing/token/mod.rs
@@ -27,11 +27,18 @@ pub(crate) use tokeniser::RESERVED_WORDS;
 #[derive(Clone, Debug, PartialEq)]
 pub(crate) struct TokenStream {
     tokens: Vec<Token>,
+    // TODO this could easily be borrowed except that the LSP caches token streams
+    source: String,
+    module_id: ModuleId,
 }
 
 impl TokenStream {
-    fn new(tokens: Vec<Token>) -> Self {
-        Self { tokens }
+    fn new(tokens: Vec<Token>, source: String, module_id: ModuleId) -> Self {
+        Self {
+            tokens,
+            source,
+            module_id,
+        }
     }
 
     pub(super) fn remove_unknown(&mut self) -> Vec<Token> {
@@ -54,6 +61,11 @@ impl TokenStream {
     pub fn as_slice(&self) -> TokenSlice {
         TokenSlice::from(self)
     }
+
+    pub fn text(&self, range: SourceRange) -> &str {
+        debug_assert_eq!(range.module_id(), self.module_id);
+        &self.source[range.start()..range.end()]
+    }
 }
 
 impl<'a> From<&'a TokenStream> for TokenSlice<'a> {
@@ -96,6 +108,10 @@ impl<'a> TokenSlice<'a> {
         &self.stream.tokens[i + self.start]
     }
 
+    pub fn text(&self, range: SourceRange) -> &str {
+        self.stream.text(range)
+    }
+
     pub fn iter(&self) -> impl Iterator<Item = &Token> {
         (**self).iter()
     }
diff --git a/src/wasm-lib/kcl/src/parsing/token/tokeniser.rs b/src/wasm-lib/kcl/src/parsing/token/tokeniser.rs
index 074514f57..d246e61b4 100644
--- a/src/wasm-lib/kcl/src/parsing/token/tokeniser.rs
+++ b/src/wasm-lib/kcl/src/parsing/token/tokeniser.rs
@@ -69,7 +69,11 @@ pub(super) fn lex(i: &str, module_id: ModuleId) -> Result<TokenStream, ...>
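
Note on the final tokeniser.rs hunk: its body is truncated above, so the exact lines are not visible. Given the new TokenStream::new(tokens, source, module_id) signature in token/mod.rs, lex presumably just threads the raw source text and the module id through when it builds the stream; that reading is an assumption, not something the hunk shows. The standalone Rust sketch below (String tokens and hard-coded byte offsets are stand-ins, not the kcl types) illustrates the pattern the change enables: keeping the original source next to the tokens lets a later stage slice exact source text by range, which is what the new i.text(expr_range) call in unnecessarily_bracketed relies on for its "Remove parentheses" suggestion.

    // Standalone illustration, not the kcl code: String tokens, hard-coded byte
    // offsets, and main() are stand-ins chosen for the example.
    struct TokenStream {
        tokens: Vec<String>, // stand-in for the real Vec<Token>
        source: String,      // kept alongside the tokens, as in the diff
    }

    impl TokenStream {
        // Mirrors the new TokenStream::text(range) helper: slice the stored
        // source by byte offsets instead of re-tokenising or re-reading a file.
        fn text(&self, start: usize, end: usize) -> &str {
            &self.source[start..end]
        }
    }

    fn main() {
        let source = "foo((a + b))".to_string();
        let stream = TokenStream { tokens: Vec::new(), source };
        // A parser holding this stream can quote the bracketed sub-expression
        // when it warns about the unnecessary parentheses...
        assert_eq!(stream.text(4, 11), "(a + b)");
        // ...and offer the unbracketed text as the suggested replacement.
        assert_eq!(stream.text(5, 10), "a + b");
    }

The warn_unneccessary_parens test exercises this path end to end: the replacement text for foo((a + b)) is sliced straight out of the stored source and applied via apply_suggestion.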