more expressive semantic tokens (#2814)
* updates
Signed-off-by: Jess Frazelle <github@jessfraz.com>
* more semantic tokens
Signed-off-by: Jess Frazelle <github@jessfraz.com>
* cleanup
Signed-off-by: Jess Frazelle <github@jessfraz.com>
* updates
Signed-off-by: Jess Frazelle <github@jessfraz.com>
---------
Signed-off-by: Jess Frazelle <github@jessfraz.com>
@@ -1,4 +1,7 @@
-use crate::ast::types;
+use crate::{
+    ast::{types, types::ValueMeta},
+    executor::SourceRange,
+};
 
 /// The "Node" type wraps all the AST elements we're able to find in a KCL
 /// file. Tokens we walk through will be one of these.
@@ -33,6 +36,34 @@ pub enum Node<'a> {
     LiteralIdentifier(&'a types::LiteralIdentifier),
 }
 
+impl From<&Node<'_>> for SourceRange {
+    fn from(node: &Node) -> Self {
+        match node {
+            Node::Program(p) => SourceRange([p.start, p.end]),
+            Node::ExpressionStatement(e) => SourceRange([e.start(), e.end()]),
+            Node::VariableDeclaration(v) => SourceRange([v.start(), v.end()]),
+            Node::ReturnStatement(r) => SourceRange([r.start(), r.end()]),
+            Node::VariableDeclarator(v) => SourceRange([v.start(), v.end()]),
+            Node::Literal(l) => SourceRange([l.start(), l.end()]),
+            Node::TagDeclarator(t) => SourceRange([t.start(), t.end()]),
+            Node::Identifier(i) => SourceRange([i.start(), i.end()]),
+            Node::BinaryExpression(b) => SourceRange([b.start(), b.end()]),
+            Node::FunctionExpression(f) => SourceRange([f.start(), f.end()]),
+            Node::CallExpression(c) => SourceRange([c.start(), c.end()]),
+            Node::PipeExpression(p) => SourceRange([p.start(), p.end()]),
+            Node::PipeSubstitution(p) => SourceRange([p.start(), p.end()]),
+            Node::ArrayExpression(a) => SourceRange([a.start(), a.end()]),
+            Node::ObjectExpression(o) => SourceRange([o.start(), o.end()]),
+            Node::MemberExpression(m) => SourceRange([m.start(), m.end()]),
+            Node::UnaryExpression(u) => SourceRange([u.start(), u.end()]),
+            Node::Parameter(p) => SourceRange([p.identifier.start(), p.identifier.end()]),
+            Node::ObjectProperty(o) => SourceRange([o.start(), o.end()]),
+            Node::MemberObject(m) => SourceRange([m.start(), m.end()]),
+            Node::LiteralIdentifier(l) => SourceRange([l.start(), l.end()]),
+        }
+    }
+}
+
 macro_rules! impl_from {
     ($node:ident, $t: ident) => {
         impl<'a> From<&'a types::$t> for Node<'a> {
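Context for the hunk above (not part of the diff): the new `impl From<&Node<'_>> for SourceRange` lets any walked node be converted to its source span with `.into()`, which is exactly what the modifier logic later in this commit does (`let node_range: SourceRange = (&node).into();`). The one irregular arm is `Node::Parameter`, which borrows its span from the parameter's identifier because a parameter has no start/end of its own. A minimal usage sketch; the helper name is illustrative, and the `contains`/`start` signatures are assumed from their uses elsewhere in this diff:

    // Sketch: range checks against any walked node become one-liners.
    // `offset` is assumed to be a byte offset into the KCL source.
    fn node_contains(node: &Node, offset: usize) -> bool {
        let range: SourceRange = node.into();
        range.contains(offset)
    }
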
@@ -1,6 +1,11 @@
 //! Functions for the `kcl` lsp server.
 
-use std::{collections::HashMap, io::Write, str::FromStr, sync::Arc};
+use std::{
+    collections::HashMap,
+    io::Write,
+    str::FromStr,
+    sync::{Arc, Mutex},
+};
 
 use tokio::sync::RwLock;
 
@@ -23,8 +28,8 @@ use tower_lsp::{
     Hover, HoverContents, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult,
     InitializedParams, InlayHint, InlayHintParams, InsertTextFormat, MarkupContent, MarkupKind, MessageType, OneOf,
     Position, RelatedFullDocumentDiagnosticReport, RenameFilesParams, RenameParams, SemanticToken,
-    SemanticTokenType, SemanticTokens, SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions,
-    SemanticTokensParams, SemanticTokensRegistrationOptions, SemanticTokensResult,
+    SemanticTokenModifier, SemanticTokenType, SemanticTokens, SemanticTokensFullOptions, SemanticTokensLegend,
+    SemanticTokensOptions, SemanticTokensParams, SemanticTokensRegistrationOptions, SemanticTokensResult,
     SemanticTokensServerCapabilities, ServerCapabilities, SignatureHelp, SignatureHelpOptions, SignatureHelpParams,
     StaticRegistrationOptions, TextDocumentItem, TextDocumentRegistrationOptions, TextDocumentSyncCapability,
     TextDocumentSyncKind, TextDocumentSyncOptions, TextEdit, WorkDoneProgressOptions, WorkspaceEdit,
@@ -33,16 +38,39 @@ use tower_lsp::{
     Client, LanguageServer,
 };
 
-use super::backend::{InnerHandle, UpdateHandle};
-use crate::{
-    ast::types::VariableKind,
-    executor::SourceRange,
-    lsp::{backend::Backend as _, safemap::SafeMap, util::IntoDiagnostic},
-    parser::PIPE_OPERATOR,
-};
-
 #[cfg(not(target_arch = "wasm32"))]
 use crate::lint::checks;
+use crate::{
+    ast::types::{Value, VariableKind},
+    executor::SourceRange,
+    lsp::{
+        backend::{Backend as _, InnerHandle, UpdateHandle},
+        safemap::SafeMap,
+        util::IntoDiagnostic,
+    },
+    parser::PIPE_OPERATOR,
+    token::TokenType,
+};
+
+lazy_static::lazy_static! {
+    pub static ref SEMANTIC_TOKEN_TYPES: Vec<SemanticTokenType> = {
+        // This is safe to unwrap because we know all the token types are valid.
+        // And the test would fail if they were not.
+        let mut gen = TokenType::all_semantic_token_types().unwrap();
+        gen.extend(vec![
+            SemanticTokenType::PARAMETER,
+            SemanticTokenType::PROPERTY,
+        ]);
+        gen
+    };
+
+    pub static ref SEMANTIC_TOKEN_MODIFIERS: Vec<SemanticTokenModifier> = {
+        vec![
+            SemanticTokenModifier::DECLARATION,
+            SemanticTokenModifier::DEFINITION,
+        ]
+    };
+}
 
 /// A subcommand for running the server.
 #[derive(Clone, Debug)]
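Background on these statics (not part of the diff): in the LSP semantic-tokens protocol this pair of vectors is the legend the server advertises once at registration; every `SemanticToken` it later emits refers to the legend by position, with `token_type` an index into the type list and `token_modifiers_bitset` read by clients as a bitmask over the modifier list. A sketch of the client-side decoding rule, assuming the legend above:

    use tower_lsp::lsp_types::SemanticTokenModifier;

    // Sketch: per the LSP spec, bit i of the bitset selects entry i of
    // the advertised modifier legend.
    fn decode_modifiers(bitset: u32, legend: &[SemanticTokenModifier]) -> Vec<SemanticTokenModifier> {
        legend
            .iter()
            .enumerate()
            .filter(|&(i, _)| bitset & (1u32 << i) != 0)
            .map(|(_, m)| m.clone())
            .collect()
    }

With `SEMANTIC_TOKEN_MODIFIERS` as declared above, a bitset of 0b01 decodes to `[DECLARATION]` and 0b10 to `[DEFINITION]`.
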
@@ -70,8 +98,6 @@ pub struct Backend {
     pub stdlib_completions: HashMap<String, CompletionItem>,
     /// The stdlib signatures for the language.
     pub stdlib_signatures: HashMap<String, SignatureHelp>,
-    /// The types of tokens the server supports.
-    pub token_types: Vec<SemanticTokenType>,
     /// Token maps.
     pub token_map: SafeMap<String, Vec<crate::token::Token>>,
     /// AST maps.
@@ -214,7 +240,7 @@ impl crate::lsp::backend::Backend for Backend {
         }
 
         // Lets update the ast.
-        let parser = crate::parser::Parser::new(tokens);
+        let parser = crate::parser::Parser::new(tokens.clone());
         let result = parser.ast();
         let ast = match result {
             Ok(ast) => ast,
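(Context, not part of the diff: the new `tokens.clone()` is needed because the original `tokens` vector is no longer consumed by the parser alone; the next hunk moves it into `self.update_semantic_tokens(tokens, &params)`.)
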
@@ -251,6 +277,9 @@ impl crate::lsp::backend::Backend for Backend {
         )
         .await;
 
+        // Update our semantic tokens.
+        self.update_semantic_tokens(tokens, &params).await;
+
         #[cfg(not(target_arch = "wasm32"))]
         {
             let discovered_findings = ast
@@ -322,14 +351,14 @@ impl Backend {
                 token_type = SemanticTokenType::FUNCTION;
             }
 
-            let token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
+            let mut token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
                 Some(index) => index,
                 // This is actually bad this should not fail.
-                // TODO: ensure we never get here.
+                // The test for listing all semantic token types should make this never happen.
                 None => {
                     self.client
                         .log_message(
-                            MessageType::INFO,
+                            MessageType::ERROR,
                             format!("token type `{:?}` not accounted for", token_type),
                         )
                         .await;
@@ -340,6 +369,108 @@ impl Backend {
             let source_range: SourceRange = token.clone().into();
             let position = source_range.start_to_lsp_position(&params.text);
 
+            // Calculate the token modifiers.
+            // Get the value at the current position.
+            let token_modifiers_bitset: u32 = if let Some(ast) = self.ast_map.get(&params.uri.to_string()).await {
+                let token_index = Arc::new(Mutex::new(token_type_index));
+                let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
+                crate::lint::walk(&ast, &|node: crate::lint::Node| {
+                    let node_range: SourceRange = (&node).into();
+                    if !node_range.contains(source_range.start()) {
+                        return Ok(true);
+                    }
+
+                    let get_modifier = |modifier: SemanticTokenModifier| -> Result<bool> {
+                        let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
+                        let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
+                            return Ok(true);
+                        };
+                        if *mods == 0 {
+                            *mods = token_modifier_index;
+                        } else {
+                            *mods |= token_modifier_index;
+                        }
+                        Ok(false)
+                    };
+
+                    match node {
+                        crate::lint::Node::TagDeclarator(_) => {
+                            return get_modifier(SemanticTokenModifier::DEFINITION);
+                        }
+                        crate::lint::Node::VariableDeclarator(variable) => {
+                            let sr: SourceRange = variable.id.clone().into();
+                            if sr.contains(source_range.start()) {
+                                if let Value::FunctionExpression(_) = &variable.init {
+                                    let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
+                                    *ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
+                                        Some(index) => index,
+                                        None => token_type_index,
+                                    };
+                                }
+
+                                return get_modifier(SemanticTokenModifier::DECLARATION);
+                            }
+                        }
+                        crate::lint::Node::Parameter(_) => {
+                            let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
+                            *ti = match self.get_semantic_token_type_index(SemanticTokenType::PARAMETER) {
+                                Some(index) => index,
+                                None => token_type_index,
+                            };
+                            return Ok(false);
+                        }
+                        crate::lint::Node::MemberExpression(member_expression) => {
+                            let sr: SourceRange = member_expression.property.clone().into();
+                            if sr.contains(source_range.start()) {
+                                let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
+                                *ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
+                                    Some(index) => index,
+                                    None => token_type_index,
+                                };
+                                return Ok(false);
+                            }
+                        }
+                        crate::lint::Node::ObjectProperty(object_property) => {
+                            let sr: SourceRange = object_property.key.clone().into();
+                            if sr.contains(source_range.start()) {
+                                let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
+                                *ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
+                                    Some(index) => index,
+                                    None => token_type_index,
+                                };
+                            }
+                            return get_modifier(SemanticTokenModifier::DECLARATION);
+                        }
+                        crate::lint::Node::CallExpression(call_expr) => {
+                            let sr: SourceRange = call_expr.callee.clone().into();
+                            if sr.contains(source_range.start()) {
+                                let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
+                                *ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
+                                    Some(index) => index,
+                                    None => token_type_index,
+                                };
+                                return Ok(false);
+                            }
+                        }
+                        _ => {}
+                    }
+                    Ok(true)
+                })
+                .unwrap_or_default();
+
+                let t = if let Ok(guard) = token_index.lock() { *guard } else { 0 };
+                token_type_index = t;
+
+                let m = if let Ok(guard) = modifier_index.lock() {
+                    *guard
+                } else {
+                    0
+                };
+                m
+            } else {
+                0
+            };
+
             // We need to check if we are on the last token of the line.
             // If we are starting from the end of the last line just add 1 to the line.
             // Check if we are on the last token of the line.
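A note on the `Arc<Mutex<_>>` dance above (not part of the diff): `crate::lint::walk` takes its visitor as a shared closure, so the callback cannot capture `&mut` references to locals; the classification results are instead written through mutexes the closure captures and read back after the walk. A standalone sketch of that pattern, with hypothetical names:

    use std::sync::{Arc, Mutex};

    // Sketch: a visitor API that only accepts a shared closure.
    fn visit_all(items: &[i32], visitor: &dyn Fn(i32)) {
        items.iter().copied().for_each(visitor);
    }

    // The caller still gets a result out by cloning an Arc<Mutex<_>>
    // into the closure and reading it back after the traversal.
    fn find_max(items: &[i32]) -> Option<i32> {
        let best: Arc<Mutex<Option<i32>>> = Arc::new(Mutex::new(None));
        let best_in_closure = best.clone();
        visit_all(items, &move |x| {
            let mut guard = best_in_closure.lock().unwrap();
            // Keep the larger of the stored value and the visited one.
            *guard = Some(guard.map_or(x, |b| b.max(x)));
        });
        let result = *best.lock().unwrap();
        result
    }
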
@@ -351,8 +482,8 @@ impl Backend {
                     delta_line: position.line - last_position.line + 1,
                     delta_start: 0,
                     length: token.value.len() as u32,
-                    token_type: token_type_index as u32,
-                    token_modifiers_bitset: 0,
+                    token_type: token_type_index,
+                    token_modifiers_bitset,
                 };
 
                 semantic_tokens.push(semantic_token);
@@ -370,8 +501,8 @@ impl Backend {
                         position.character - last_position.character
                     },
                     length: token.value.len() as u32,
-                    token_type: token_type_index as u32,
-                    token_modifiers_bitset: 0,
+                    token_type: token_type_index,
+                    token_modifiers_bitset,
                 };
 
                 semantic_tokens.push(semantic_token);
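Protocol background for the two hunks above (not part of the diff): LSP semantic tokens are delta-encoded. Each token carries `delta_line` relative to the previous token's line, and `delta_start` relative to the previous token's start column when both sit on one line, otherwise relative to column 0; that is why the code tracks `last_position`. A self-contained sketch of the encoding, with illustrative tuple types:

    // Sketch: turn absolute (line, col, len) triples into the LSP's
    // [delta_line, delta_start, length] wire layout. Zero-based positions.
    fn delta_encode(tokens: &[(u32, u32, u32)]) -> Vec<[u32; 3]> {
        let (mut last_line, mut last_col) = (0u32, 0u32);
        tokens
            .iter()
            .map(|&(line, col, len)| {
                let delta_line = line - last_line;
                // Column is relative to the previous token only on the same line.
                let delta_start = if delta_line == 0 { col - last_col } else { col };
                last_line = line;
                last_col = col;
                [delta_line, delta_start, len]
            })
            .collect()
    }

For example, tokens starting at (line 0, col 0) and (line 0, col 6) encode deltas of [0, 0, ...] and [0, 6, ...], while a following token on the next line at col 2 encodes [1, 2, ...].
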
@@ -518,8 +649,18 @@ impl Backend {
         Ok(())
     }
 
-    fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<usize> {
-        self.token_types.iter().position(|x| *x == token_type)
+    pub fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<u32> {
+        SEMANTIC_TOKEN_TYPES
+            .iter()
+            .position(|x| *x == token_type)
+            .map(|y| y as u32)
+    }
+
+    pub fn get_semantic_token_modifier_index(&self, token_type: SemanticTokenModifier) -> Option<u32> {
+        SEMANTIC_TOKEN_MODIFIERS
+            .iter()
+            .position(|x| *x == token_type)
+            .map(|y| y as u32)
     }
 
     async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
@@ -739,8 +880,8 @@ impl LanguageServer for Backend {
                 semantic_tokens_options: SemanticTokensOptions {
                     work_done_progress_options: WorkDoneProgressOptions::default(),
                     legend: SemanticTokensLegend {
-                        token_types: self.token_types.clone(),
-                        token_modifiers: vec![],
+                        token_types: SEMANTIC_TOKEN_TYPES.clone(),
+                        token_modifiers: SEMANTIC_TOKEN_MODIFIERS.clone(),
                     },
                     range: Some(false),
                     full: Some(SemanticTokensFullOptions::Bool(true)),
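(Context, not part of the diff: advertising `SEMANTIC_TOKEN_TYPES` and `SEMANTIC_TOKEN_MODIFIERS` here, the same statics that `get_semantic_token_type_index` and `get_semantic_token_modifier_index` search, keeps the advertised legend and the emitted indices in sync by construction; the per-instance `token_types` field this replaces had to be kept in agreement by hand.)
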
@@ -5,7 +5,10 @@ use std::{
 
 use anyhow::Result;
 use pretty_assertions::assert_eq;
-use tower_lsp::LanguageServer;
+use tower_lsp::{
+    lsp_types::{SemanticTokenModifier, SemanticTokenType},
+    LanguageServer,
+};
 
 use crate::{executor::ProgramMemory, lsp::backend::Backend};
 
@@ -42,9 +45,6 @@ async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
     let stdlib = crate::std::StdLib::new();
     let stdlib_completions = crate::lsp::kcl::get_completions_from_stdlib(&stdlib)?;
     let stdlib_signatures = crate::lsp::kcl::get_signatures_from_stdlib(&stdlib)?;
-    // We can unwrap here because we know the tokeniser is valid, since
-    // we have a test for it.
-    let token_types = crate::token::TokenType::all_semantic_token_types()?;
 
     let zoo_client = new_zoo_client();
 
@@ -63,7 +63,6 @@ async fn kcl_lsp_server(execute: bool) -> Result<crate::lsp::kcl::Backend> {
         workspace_folders: Default::default(),
         stdlib_completions,
         stdlib_signatures,
-        token_types,
         token_map: Default::default(),
         ast_map: Default::default(),
         memory_map: Default::default(),
@@ -1087,6 +1086,163 @@ async fn test_kcl_lsp_semantic_tokens() {
         }
     }
 }
 
+#[tokio::test(flavor = "multi_thread")]
+async fn test_kcl_lsp_semantic_tokens_with_modifiers() {
+    let server = kcl_lsp_server(false).await.unwrap();
+
+    // Send open file.
+    server
+        .did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
+            text_document: tower_lsp::lsp_types::TextDocumentItem {
+                uri: "file:///test.kcl".try_into().unwrap(),
+                language_id: "kcl".to_string(),
+                version: 1,
+                text: r#"const part001 = startSketchOn('XY')
+  |> startProfileAt([-10, -10], %)
+  |> line([20, 0], %)
+  |> line([0, 20], %, $seg01)
+  |> line([-20, 0], %)
+  |> close(%)
+  |> extrude(3.14, %)
+
+const thing = {blah: "foo"}
+const bar = thing.blah
+
+fn myFn = (param1) => {
+  return param1
+}"#
+                .to_string(),
+            },
+        })
+        .await;
+    server.wait_on_handle().await;
+
+    // Assure we have no diagnostics.
+    let diagnostics = server.diagnostics_map.get("file:///test.kcl").await.unwrap().clone();
+    // Check the diagnostics.
+    if let tower_lsp::lsp_types::DocumentDiagnosticReport::Full(diagnostics) = diagnostics {
+        if !diagnostics.full_document_diagnostic_report.items.is_empty() {
+            panic!(
+                "Expected no diagnostics, {:?}",
+                diagnostics.full_document_diagnostic_report.items
+            );
+        }
+    } else {
+        panic!("Expected full diagnostics");
+    }
+
+    // Get the token map.
+    let token_map = server.token_map.get("file:///test.kcl").await.unwrap().clone();
+    assert!(token_map != vec![]);
+
+    // Get the ast.
+    let ast = server.ast_map.get("file:///test.kcl").await.unwrap().clone();
+    assert!(ast != crate::ast::types::Program::default());
+
+    // Send semantic tokens request.
+    let semantic_tokens = server
+        .semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
+            text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
+                uri: "file:///test.kcl".try_into().unwrap(),
+            },
+            partial_result_params: Default::default(),
+            work_done_progress_params: Default::default(),
+        })
+        .await
+        .unwrap()
+        .unwrap();
+
+    // Check the semantic tokens.
+    if let tower_lsp::lsp_types::SemanticTokensResult::Tokens(semantic_tokens) = semantic_tokens {
+        let function_index = server
+            .get_semantic_token_type_index(SemanticTokenType::FUNCTION)
+            .unwrap();
+        let property_index = server
+            .get_semantic_token_type_index(SemanticTokenType::PROPERTY)
+            .unwrap();
+        let parameter_index = server
+            .get_semantic_token_type_index(SemanticTokenType::PARAMETER)
+            .unwrap();
+        let variable_index = server
+            .get_semantic_token_type_index(SemanticTokenType::VARIABLE)
+            .unwrap();
+
+        let declaration_index = server
+            .get_semantic_token_modifier_index(SemanticTokenModifier::DECLARATION)
+            .unwrap();
+        let definition_index = server
+            .get_semantic_token_modifier_index(SemanticTokenModifier::DEFINITION)
+            .unwrap();
+
+        // Iterate over the tokens and check the token types.
+        let mut found_definition = false;
+        let mut found_parameter = false;
+        let mut found_property = false;
+        let mut found_function_declaration = false;
+        let mut found_variable_declaration = false;
+        let mut found_property_declaration = false;
+        for token in semantic_tokens.data {
+            if token.token_modifiers_bitset == definition_index {
+                found_definition = true;
+            }
+
+            if token.token_type == parameter_index {
+                found_parameter = true;
+            } else if token.token_type == property_index {
+                found_property = true;
+            }
+
+            if token.token_type == function_index && token.token_modifiers_bitset == declaration_index {
+                found_function_declaration = true;
+            }
+
+            if token.token_type == variable_index && token.token_modifiers_bitset == declaration_index {
+                found_variable_declaration = true;
+            }
+
+            if token.token_type == property_index && token.token_modifiers_bitset == declaration_index {
+                found_property_declaration = true;
+            }
+
+            if found_definition
+                && found_parameter
+                && found_property
+                && found_function_declaration
+                && found_variable_declaration
+                && found_property_declaration
+            {
+                break;
+            }
+        }
+
+        if !found_definition {
+            panic!("Expected definition token");
+        }
+
+        if !found_parameter {
+            panic!("Expected parameter token");
+        }
+
+        if !found_property {
+            panic!("Expected property token");
+        }
+
+        if !found_function_declaration {
+            panic!("Expected function declaration token");
+        }
+
+        if !found_variable_declaration {
+            panic!("Expected variable declaration token");
+        }
+
+        if !found_property_declaration {
+            panic!("Expected property declaration token");
+        }
+    } else {
+        panic!("Expected semantic tokens");
+    }
+}
+
 #[tokio::test(flavor = "multi_thread")]
 async fn test_kcl_lsp_semantic_tokens_multiple_comments() {
     let server = kcl_lsp_server(false).await.unwrap();
@@ -235,9 +235,6 @@ pub async fn kcl_lsp_run(
     let stdlib = kcl_lib::std::StdLib::new();
     let stdlib_completions = kcl_lib::lsp::kcl::get_completions_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
     let stdlib_signatures = kcl_lib::lsp::kcl::get_signatures_from_stdlib(&stdlib).map_err(|e| e.to_string())?;
-    // We can unwrap here because we know the tokeniser is valid, since
-    // we have a test for it.
-    let token_types = kcl_lib::token::TokenType::all_semantic_token_types().unwrap();
 
     let mut zoo_client = kittycad::Client::new(token);
     zoo_client.set_base_url(baseurl.as_str());
@@ -287,7 +284,6 @@ pub async fn kcl_lsp_run(
         workspace_folders: Default::default(),
         stdlib_completions,
         stdlib_signatures,
-        token_types,
         token_map: Default::default(),
         ast_map: Default::default(),
         memory_map: Default::default(),