More semantic tokens modifiers (#2823)
* more semantic tokens Signed-off-by: Jess Frazelle <github@jessfraz.com>
* updates Signed-off-by: Jess Frazelle <github@jessfraz.com>
* remove closed Signed-off-by: Jess Frazelle <github@jessfraz.com>
* ficxes Signed-off-by: Jess Frazelle <github@jessfraz.com>
* nuke more Signed-off-by: Jess Frazelle <github@jessfraz.com>
* fix wasm Signed-off-by: Jess Frazelle <github@jessfraz.com>
---------
Signed-off-by: Jess Frazelle <github@jessfraz.com>
src/wasm-lib/Cargo.lock (generated):
@@ -710,7 +710,7 @@ dependencies = [

 [[package]]
 name = "derive-docs"
-version = "0.1.18"
+version = "0.1.19"
 dependencies = [
 "Inflector",
 "anyhow",
@@ -1383,7 +1383,7 @@ dependencies = [

 [[package]]
 name = "kcl-lib"
-version = "0.1.67"
+version = "0.1.68"
 dependencies = [
 "anyhow",
 "approx",
@@ -1,7 +1,7 @@
 [package]
 name = "derive-docs"
 description = "A tool for generating documentation from Rust derive macros"
-version = "0.1.18"
+version = "0.1.19"
 edition = "2021"
 license = "MIT"
 repository = "https://github.com/KittyCAD/modeling-app"
@@ -761,7 +761,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
 is_mock: true,
 };

-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -795,7 +795,7 @@ fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> pr
 let program = parser.ast().unwrap();
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default()).await.unwrap();

-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();

 // Zoom to fit.
 ctx.engine
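The two hunks above change the test harness that derive-docs emits for each documented code example; the test_examples_* hunks that follow appear to be its regenerated expected outputs, where every generated doc-example test now calls ctx.run(&program, None) instead of moving the program. A rough sketch of the idea using plain string templating rather than the crate's real proc-macro machinery (the parse and mock_executor_context names inside the generated text are placeholders, not the kcl-lib API):

```rust
// Minimal sketch of generating a doc-example test as source text.
// The real derive-docs macro presumably builds a proc-macro token stream;
// this stand-in only illustrates the one-line template change from
// `ctx.run(program, None)` to `ctx.run(&program, None)`.
fn generate_code_block_test(fn_name: &str, code_block: &str, index: usize) -> String {
    format!(
        r#"
#[tokio::test(flavor = "multi_thread", worker_threads = 5)]
async fn test_example_{fn_name}_{index}() {{
    let program = parse({code_block:?});
    let ctx = mock_executor_context();
    // Borrow the program instead of moving it into `run`.
    ctx.run(&program, None).await.unwrap();
}}
"#
    )
}

fn main() {
    println!("{}", generate_code_block_test("lineTo", "startSketchOn('XY')", 0));
}
```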
@@ -16,7 +16,7 @@ mod test_examples_someFn {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_someFn {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_someFn {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_someFn {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_show {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_show {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -106,7 +106,7 @@ mod test_examples_show {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -135,7 +135,7 @@ mod test_examples_show {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -16,7 +16,7 @@ mod test_examples_show {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_show {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -17,7 +17,7 @@ mod test_examples_my_func {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -47,7 +47,7 @@ mod test_examples_my_func {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -108,7 +108,7 @@ mod test_examples_my_func {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -137,7 +137,7 @@ mod test_examples_my_func {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -17,7 +17,7 @@ mod test_examples_line_to {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -47,7 +47,7 @@ mod test_examples_line_to {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -108,7 +108,7 @@ mod test_examples_line_to {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -137,7 +137,7 @@ mod test_examples_line_to {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -16,7 +16,7 @@ mod test_examples_min {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_min {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -106,7 +106,7 @@ mod test_examples_min {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -135,7 +135,7 @@ mod test_examples_min {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_show {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_show {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_import {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_import {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_import {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_import {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_import {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_import {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),

@@ -16,7 +16,7 @@ mod test_examples_show {
 settings: Default::default(),
 is_mock: true,
 };
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 }

 #[tokio::test(flavor = "multi_thread", worker_threads = 5)]
@@ -45,7 +45,7 @@ mod test_examples_show {
 let ctx = crate::executor::ExecutorContext::new(&client, Default::default())
 .await
 .unwrap();
-ctx.run(program, None).await.unwrap();
+ctx.run(&program, None).await.unwrap();
 ctx.engine
 .send_modeling_cmd(
 uuid::Uuid::new_v4(),
@@ -157,7 +157,7 @@ async fn snapshot_endpoint(body: Bytes, state: ExecutorContext) -> Response<Body
 // Let users know if the test is taking a long time.
 let (done_tx, done_rx) = oneshot::channel::<()>();
 let timer = time_until(done_rx);
-let snapshot = match state.execute_and_prepare_snapshot(program).await {
+let snapshot = match state.execute_and_prepare_snapshot(&program).await {
 Ok(sn) => sn,
 Err(e) => return kcl_err(e),
 };
@@ -1,7 +1,7 @@
 [package]
 name = "kcl-lib"
 description = "KittyCAD Language implementation and tools"
-version = "0.1.67"
+version = "0.1.68"
 edition = "2021"
 license = "MIT"
 repository = "https://github.com/KittyCAD/modeling-app"
@@ -19,7 +19,7 @@ chrono = "0.4.38"
 clap = { version = "4.5.7", default-features = false, optional = true }
 dashmap = "6.0.1"
 databake = { version = "0.1.8", features = ["derive"] }
-derive-docs = { version = "0.1.18", path = "../derive-docs" }
+derive-docs = { version = "0.1.19", path = "../derive-docs" }
 form_urlencoded = "1.2.1"
 futures = { version = "0.3.30" }
 git_rev = "0.1.0"
@@ -1217,7 +1217,7 @@ impl CallExpression {

 // Call the stdlib function
 let p = func.function().clone().body;
-let results = match ctx.inner_execute(p, &mut fn_memory, BodyType::Block).await {
+let results = match ctx.inner_execute(&p, &mut fn_memory, BodyType::Block).await {
 Ok(results) => results,
 Err(err) => {
 // We need to override the source ranges so we don't get the embedded kcl
@@ -1455,7 +1455,7 @@ impl ExecutorContext {
 /// Kurt uses this for partial execution.
 pub async fn run(
 &self,
-program: crate::ast::types::Program,
+program: &crate::ast::types::Program,
 memory: Option<ProgramMemory>,
 ) -> Result<ProgramMemory, KclError> {
 // Before we even start executing the program, set the units.
@@ -1481,7 +1481,7 @@ impl ExecutorContext {
 #[async_recursion]
 pub(crate) async fn inner_execute(
 &self,
-program: crate::ast::types::Program,
+program: &crate::ast::types::Program,
 memory: &mut ProgramMemory,
 body_type: BodyType,
 ) -> Result<ProgramMemory, KclError> {
@@ -1513,9 +1513,7 @@ impl ExecutorContext {
 }
 FunctionKind::Std(func) => {
 let mut newmem = memory.clone();
-let result = self
-.inner_execute(func.program().to_owned(), &mut newmem, BodyType::Block)
-.await?;
+let result = self.inner_execute(func.program(), &mut newmem, BodyType::Block).await?;
 memory.return_ = result.return_;
 }
 FunctionKind::UserDefined => {
@@ -1651,7 +1649,7 @@ impl ExecutorContext {
 let mut fn_memory = assign_args_to_params(&function_expression, args, memory.clone())?;

 let result = ctx
-.inner_execute(function_expression.body.clone(), &mut fn_memory, BodyType::Block)
+.inner_execute(&function_expression.body, &mut fn_memory, BodyType::Block)
 .await?;

 Ok((result.return_, fn_memory.get_tags()))
@@ -1701,7 +1699,7 @@ impl ExecutorContext {
 }

 /// Execute the program, then get a PNG screenshot.
-pub async fn execute_and_prepare_snapshot(&self, program: Program) -> Result<kittycad::types::TakeSnapshot> {
+pub async fn execute_and_prepare_snapshot(&self, program: &Program) -> Result<kittycad::types::TakeSnapshot> {
 let _ = self.run(program, None).await?;

 // Zoom to fit.
@@ -1818,7 +1816,7 @@ mod tests {
 settings: Default::default(),
 is_mock: true,
 };
-let memory = ctx.run(program, None).await?;
+let memory = ctx.run(&program, None).await?;

 Ok(memory)
 }
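The executor now borrows the AST: run, inner_execute, and execute_and_prepare_snapshot all take &Program, so nested execution of a function body no longer needs .clone() or .to_owned(). A minimal sketch of the same borrowing pattern with stand-in types (not the kcl-lib executor):

```rust
// Minimal sketch (stand-in types, not the kcl-lib executor): when the
// interpreter borrows the AST, evaluating a nested body is just another `&`
// borrow instead of cloning the sub-tree for every call.
#[derive(Debug)]
enum Expr {
    Num(f64),
    Add(Box<Expr>, Box<Expr>),
    // A "function body" evaluated like inner_execute on a function's body.
    Block(Vec<Expr>),
}

fn eval(expr: &Expr) -> f64 {
    match expr {
        Expr::Num(n) => *n,
        Expr::Add(a, b) => eval(a) + eval(b),
        // No clone of the sub-tree is needed; we just re-borrow it.
        Expr::Block(body) => body.iter().map(eval).sum(),
    }
}

fn main() {
    let program = Expr::Block(vec![
        Expr::Num(1.0),
        Expr::Add(Box::new(Expr::Num(2.0)), Box::new(Expr::Num(3.0))),
    ]);
    // The same parsed program can be executed repeatedly by reference.
    assert_eq!(eval(&program), 6.0);
    assert_eq!(eval(&program), 6.0);
}
```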
@@ -63,9 +63,9 @@ pub trait Backend: Clone + Send + Sync
 where
 Self: 'static,
 {
-fn client(&self) -> tower_lsp::Client;
+fn client(&self) -> &tower_lsp::Client;

-fn fs(&self) -> Arc<crate::fs::FileManager>;
+fn fs(&self) -> &Arc<crate::fs::FileManager>;

 async fn is_initialized(&self) -> bool;

@@ -82,7 +82,7 @@ where
 async fn remove_workspace_folders(&self, folders: Vec<WorkspaceFolder>);

 /// Get the current code map.
-fn code_map(&self) -> SafeMap<String, Vec<u8>>;
+fn code_map(&self) -> &SafeMap<String, Vec<u8>>;

 /// Insert a new code map.
 async fn insert_code_map(&self, uri: String, text: Vec<u8>);
@@ -94,7 +94,7 @@ where
 async fn clear_code_state(&self);

 /// Get the current diagnostics map.
-fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport>;
+fn current_diagnostics_map(&self) -> &SafeMap<String, DocumentDiagnosticReport>;

 /// On change event.
 async fn inner_on_change(&self, params: TextDocumentItem, force: bool);
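The shared Backend trait's accessors now hand out references to the server's state instead of owned copies, which removes a clone per call and guarantees every caller sees the same map. A small sketch of the signature change with stand-in types (HashMap instead of SafeMap, no async):

```rust
use std::collections::HashMap;

// Minimal sketch of the accessor change (stand-in types, not the kcl-lib
// Backend trait): returning `&T` lets every call share the one stored value,
// while the old `-> T` signature forced each implementation to clone.
trait Backend {
    // New style: borrow the state owned by `self`.
    fn code_map(&self) -> &HashMap<String, Vec<u8>>;
}

struct Server {
    code_map: HashMap<String, Vec<u8>>,
}

impl Backend for Server {
    fn code_map(&self) -> &HashMap<String, Vec<u8>> {
        // No `.clone()`: callers observe the live map.
        &self.code_map
    }
}

fn main() {
    let mut server = Server { code_map: HashMap::new() };
    server.code_map.insert("file:///test.kcl".into(), b"startSketchOn('XY')".to_vec());
    assert_eq!(server.code_map().len(), 1);
}
```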
@@ -61,6 +61,8 @@ pub struct Backend {
 pub cache: Arc<cache::CopilotCache>,
 /// Storage so we can send telemetry data back out.
 pub telemetry: SafeMap<uuid::Uuid, CopilotCompletionTelemetry>,
+/// Diagnostics.
+pub diagnostics_map: SafeMap<String, DocumentDiagnosticReport>,

 pub is_initialized: Arc<tokio::sync::RwLock<bool>>,
 pub current_handle: UpdateHandle,
@@ -69,12 +71,12 @@ pub struct Backend {
 // Implement the shared backend trait for the language server.
 #[async_trait::async_trait]
 impl crate::lsp::backend::Backend for Backend {
-fn client(&self) -> tower_lsp::Client {
-self.client.clone()
+fn client(&self) -> &tower_lsp::Client {
+&self.client
 }

-fn fs(&self) -> Arc<crate::fs::FileManager> {
-self.fs.clone()
+fn fs(&self) -> &Arc<crate::fs::FileManager> {
+&self.fs
 }

 async fn is_initialized(&self) -> bool {
@@ -109,8 +111,8 @@ impl crate::lsp::backend::Backend for Backend {
 }
 }

-fn code_map(&self) -> SafeMap<String, Vec<u8>> {
-self.code_map.clone()
+fn code_map(&self) -> &SafeMap<String, Vec<u8>> {
+&self.code_map
 }

 async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
@@ -125,8 +127,8 @@ impl crate::lsp::backend::Backend for Backend {
 self.code_map.clear().await;
 }

-fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
-Default::default()
+fn current_diagnostics_map(&self) -> &SafeMap<String, DocumentDiagnosticReport> {
+&self.diagnostics_map
 }

 async fn inner_on_change(&self, _params: TextDocumentItem, _force: bool) {
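Note the behavioral part of this hunk: the copilot backend previously answered current_diagnostics_map with Default::default(), i.e. a fresh empty map on every call; it now stores a diagnostics_map field and returns a reference to it, so recorded diagnostics are actually visible to callers. A toy illustration of the difference (stand-in types, not the crate's SafeMap):

```rust
use std::collections::HashMap;

// Sketch of the change above (stand-in types): a getter that returns
// `Default::default()` hands back a brand-new empty map on every call, so
// state written elsewhere can never be observed through it.
struct Old;
impl Old {
    fn current_diagnostics_map(&self) -> HashMap<String, String> {
        Default::default() // always empty, regardless of what was recorded
    }
}

struct New {
    diagnostics_map: HashMap<String, String>,
}
impl New {
    fn current_diagnostics_map(&self) -> &HashMap<String, String> {
        &self.diagnostics_map // the real, stored diagnostics
    }
}

fn main() {
    let fixed = New {
        diagnostics_map: HashMap::from([("file:///test.kcl".to_string(), "syntax error".to_string())]),
    };
    assert!(Old.current_diagnostics_map().is_empty());
    assert_eq!(fixed.current_diagnostics_map().len(), 1);
}
```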
@@ -68,6 +68,9 @@ lazy_static::lazy_static! {
 vec![
 SemanticTokenModifier::DECLARATION,
 SemanticTokenModifier::DEFINITION,
+SemanticTokenModifier::DEFAULT_LIBRARY,
+SemanticTokenModifier::READONLY,
+SemanticTokenModifier::STATIC,
 ]
 };
 }
@@ -128,12 +131,12 @@ pub struct Backend {
 // Implement the shared backend trait for the language server.
 #[async_trait::async_trait]
 impl crate::lsp::backend::Backend for Backend {
-fn client(&self) -> Client {
-self.client.clone()
+fn client(&self) -> &Client {
+&self.client
 }

-fn fs(&self) -> Arc<crate::fs::FileManager> {
-self.fs.clone()
+fn fs(&self) -> &Arc<crate::fs::FileManager> {
+&self.fs
 }

 async fn is_initialized(&self) -> bool {
@@ -168,8 +171,8 @@ impl crate::lsp::backend::Backend for Backend {
 }
 }

-fn code_map(&self) -> SafeMap<String, Vec<u8>> {
-self.code_map.clone()
+fn code_map(&self) -> &SafeMap<String, Vec<u8>> {
+&self.code_map
 }

 async fn insert_code_map(&self, uri: String, text: Vec<u8>) {
@@ -189,8 +192,8 @@ impl crate::lsp::backend::Backend for Backend {
 self.semantic_tokens_map.clear().await;
 }

-fn current_diagnostics_map(&self) -> SafeMap<String, DocumentDiagnosticReport> {
-self.diagnostics_map.clone()
+fn current_diagnostics_map(&self) -> &SafeMap<String, DocumentDiagnosticReport> {
+&self.diagnostics_map
 }

 async fn inner_on_change(&self, params: TextDocumentItem, force: bool) {
@@ -220,8 +223,8 @@ impl crate::lsp::backend::Backend for Backend {
 false
 };

-let tokens_changed = if let Some(previous_tokens) = previous_tokens.clone() {
-previous_tokens != tokens
+let tokens_changed = if let Some(previous_tokens) = &previous_tokens {
+*previous_tokens != tokens
 } else {
 true
 };
@@ -236,7 +239,7 @@ impl crate::lsp::backend::Backend for Backend {
 // Update our token map.
 self.token_map.insert(params.uri.to_string(), tokens.clone()).await;
 // Update our semantic tokens.
-self.update_semantic_tokens(tokens.clone(), &params).await;
+self.update_semantic_tokens(&tokens, &params).await;
 }

 // Lets update the ast.
@@ -278,7 +281,7 @@ impl crate::lsp::backend::Backend for Backend {
 .await;

 // Update our semantic tokens.
-self.update_semantic_tokens(tokens, &params).await;
+self.update_semantic_tokens(&tokens, &params).await;

 #[cfg(not(target_arch = "wasm32"))]
 {
@@ -308,7 +311,7 @@ impl crate::lsp::backend::Backend for Backend {
 // Execute the code if we have an executor context.
 // This function automatically executes if we should & updates the diagnostics if we got
 // errors.
-if self.execute(&params, ast.clone()).await.is_err() {
+if self.execute(&params, &ast).await.is_err() {
 return;
 }

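Three modifiers (DEFAULT_LIBRARY, READONLY, STATIC) are added to the SEMANTIC_TOKEN_MODIFIERS legend that the server advertises. In LSP, the values attached to each emitted token are derived from positions in that ordered list, which is why the lookups in this file use .iter().position(..). A stand-in sketch with plain strings in place of the lsp_types constants:

```rust
// Minimal sketch (plain strings as stand-ins for lsp_types values): the
// server registers one ordered modifier legend; what a token later carries is
// derived from positions in that list.
const SEMANTIC_TOKEN_MODIFIERS: &[&str] = &[
    "declaration",
    "definition",
    "defaultLibrary", // newly advertised
    "readonly",       // newly advertised
    "static",         // newly advertised
];

fn modifier_index(modifier: &str) -> Option<u32> {
    SEMANTIC_TOKEN_MODIFIERS
        .iter()
        .position(|m| *m == modifier)
        .map(|i| i as u32)
}

fn main() {
    assert_eq!(modifier_index("declaration"), Some(0));
    assert_eq!(modifier_index("static"), Some(4));
    assert_eq!(modifier_index("deprecated"), None); // not in this legend
}
```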
@@ -335,23 +338,18 @@ impl Backend {
 *self.executor_ctx.write().await = Some(executor_ctx);
 }

-async fn update_semantic_tokens(&self, tokens: Vec<crate::token::Token>, params: &TextDocumentItem) {
+async fn update_semantic_tokens(&self, tokens: &[crate::token::Token], params: &TextDocumentItem) {
 // Update the semantic tokens map.
 let mut semantic_tokens = vec![];
 let mut last_position = Position::new(0, 0);
-for token in &tokens {
-let Ok(mut token_type) = SemanticTokenType::try_from(token.token_type) else {
+for token in tokens {
+let Ok(token_type) = SemanticTokenType::try_from(token.token_type) else {
 // We continue here because not all tokens can be converted this way, we will get
 // the rest from the ast.
 continue;
 };

-if token.token_type == crate::token::TokenType::Word && self.stdlib_completions.contains_key(&token.value) {
-// This is a stdlib function.
-token_type = SemanticTokenType::FUNCTION;
-}
-
-let mut token_type_index = match self.get_semantic_token_type_index(token_type.clone()) {
+let mut token_type_index = match self.get_semantic_token_type_index(&token_type) {
 Some(index) => index,
 // This is actually bad this should not fail.
 // The test for listing all semantic token types should make this never happen.
@@ -366,12 +364,12 @@
 }
 };

-let source_range: SourceRange = token.clone().into();
+let source_range: SourceRange = token.into();
 let position = source_range.start_to_lsp_position(&params.text);

 // Calculate the token modifiers.
 // Get the value at the current position.
-let token_modifiers_bitset: u32 = if let Some(ast) = self.ast_map.get(&params.uri.to_string()).await {
+let token_modifiers_bitset = if let Some(ast) = self.ast_map.get(&params.uri.to_string()).await {
 let token_index = Arc::new(Mutex::new(token_type_index));
 let modifier_index: Arc<Mutex<u32>> = Arc::new(Mutex::new(0));
 crate::walk::walk(&ast, &|node: crate::walk::Node| {
@@ -380,7 +378,7 @@
 return Ok(true);
 }

-let get_modifier = |modifier: SemanticTokenModifier| -> Result<bool> {
+let get_modifier = |modifier: Vec<SemanticTokenModifier>| -> Result<bool> {
 let mut mods = modifier_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
 let Some(token_modifier_index) = self.get_semantic_token_modifier_index(modifier) else {
 return Ok(true);
@@ -395,35 +393,41 @@

 match node {
 crate::walk::Node::TagDeclarator(_) => {
-return get_modifier(SemanticTokenModifier::DEFINITION);
+return get_modifier(vec![
+SemanticTokenModifier::DEFINITION,
+SemanticTokenModifier::STATIC,
+]);
 }
 crate::walk::Node::VariableDeclarator(variable) => {
-let sr: SourceRange = variable.id.clone().into();
+let sr: SourceRange = (&variable.id).into();
 if sr.contains(source_range.start()) {
 if let Value::FunctionExpression(_) = &variable.init {
 let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
-*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
+*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
 Some(index) => index,
 None => token_type_index,
 };
 }

-return get_modifier(SemanticTokenModifier::DECLARATION);
+return get_modifier(vec![
+SemanticTokenModifier::DECLARATION,
+SemanticTokenModifier::READONLY,
+]);
 }
 }
 crate::walk::Node::Parameter(_) => {
 let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
-*ti = match self.get_semantic_token_type_index(SemanticTokenType::PARAMETER) {
+*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PARAMETER) {
 Some(index) => index,
 None => token_type_index,
 };
 return Ok(false);
 }
 crate::walk::Node::MemberExpression(member_expression) => {
-let sr: SourceRange = member_expression.property.clone().into();
+let sr: SourceRange = (&member_expression.property).into();
 if sr.contains(source_range.start()) {
 let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
-*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
+*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
 Some(index) => index,
 None => token_type_index,
 };
@@ -431,24 +435,30 @@
 }
 }
 crate::walk::Node::ObjectProperty(object_property) => {
-let sr: SourceRange = object_property.key.clone().into();
+let sr: SourceRange = (&object_property.key).into();
 if sr.contains(source_range.start()) {
 let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
-*ti = match self.get_semantic_token_type_index(SemanticTokenType::PROPERTY) {
+*ti = match self.get_semantic_token_type_index(&SemanticTokenType::PROPERTY) {
 Some(index) => index,
 None => token_type_index,
 };
 }
-return get_modifier(SemanticTokenModifier::DECLARATION);
+return get_modifier(vec![SemanticTokenModifier::DECLARATION]);
 }
 crate::walk::Node::CallExpression(call_expr) => {
-let sr: SourceRange = call_expr.callee.clone().into();
+let sr: SourceRange = (&call_expr.callee).into();
 if sr.contains(source_range.start()) {
 let mut ti = token_index.lock().map_err(|_| anyhow::anyhow!("mutex"))?;
-*ti = match self.get_semantic_token_type_index(SemanticTokenType::FUNCTION) {
+*ti = match self.get_semantic_token_type_index(&SemanticTokenType::FUNCTION) {
 Some(index) => index,
 None => token_type_index,
 };
+
+if self.stdlib_completions.contains_key(&call_expr.callee.name) {
+// This is a stdlib function.
+return get_modifier(vec![SemanticTokenModifier::DEFAULT_LIBRARY]);
+}
+
 return Ok(false);
 }
 }
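The walk callback above is where modifiers get chosen per AST node: tag declarators get DEFINITION plus STATIC, variable declarators DECLARATION plus READONLY, object properties DECLARATION, and calls that resolve to stdlib functions DEFAULT_LIBRARY. A condensed stand-in of that decision table (hypothetical Node enum, not crate::walk::Node):

```rust
// Stand-in sketch of the per-node modifier table used by the walk callback
// above (hypothetical enum, not crate::walk::Node).
#[derive(Debug)]
enum Node {
    TagDeclarator,
    VariableDeclarator,
    ObjectProperty,
    StdlibCall,
    Other,
}

fn modifiers_for(node: &Node) -> Vec<&'static str> {
    match node {
        Node::TagDeclarator => vec!["definition", "static"],
        Node::VariableDeclarator => vec!["declaration", "readonly"],
        Node::ObjectProperty => vec!["declaration"],
        Node::StdlibCall => vec!["defaultLibrary"],
        Node::Other => vec![],
    }
}

fn main() {
    assert_eq!(modifiers_for(&Node::TagDeclarator), ["definition", "static"]);
    assert_eq!(modifiers_for(&Node::StdlibCall), ["defaultLibrary"]);
}
```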
@@ -516,7 +526,7 @@

 async fn clear_diagnostics_map(&self, uri: &url::Url, severity: Option<DiagnosticSeverity>) {
 let mut items = match self.diagnostics_map.get(uri.as_str()).await {
-Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items.clone(),
+Some(DocumentDiagnosticReport::Full(report)) => report.full_document_diagnostic_report.items,
 _ => vec![],
 };

@@ -568,9 +578,9 @@
 .await;
 }

-let DocumentDiagnosticReport::Full(mut report) = self
-.diagnostics_map
-.get(params.uri.clone().as_str())
+let DocumentDiagnosticReport::Full(mut report) =
+self.diagnostics_map
+.get(params.uri.as_str())
 .await
 .unwrap_or(DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
 related_documents: None,
@@ -607,7 +617,7 @@
 .await;
 }

-async fn execute(&self, params: &TextDocumentItem, ast: crate::ast::types::Program) -> Result<()> {
+async fn execute(&self, params: &TextDocumentItem, ast: &crate::ast::types::Program) -> Result<()> {
 // Check if we can execute.
 if !self.can_execute().await {
 return Ok(());
@@ -649,18 +659,32 @@
 Ok(())
 }

-pub fn get_semantic_token_type_index(&self, token_type: SemanticTokenType) -> Option<u32> {
+pub fn get_semantic_token_type_index(&self, token_type: &SemanticTokenType) -> Option<u32> {
 SEMANTIC_TOKEN_TYPES
 .iter()
-.position(|x| *x == token_type)
+.position(|x| *x == *token_type)
 .map(|y| y as u32)
 }

-pub fn get_semantic_token_modifier_index(&self, token_type: SemanticTokenModifier) -> Option<u32> {
-SEMANTIC_TOKEN_MODIFIERS
+pub fn get_semantic_token_modifier_index(&self, token_types: Vec<SemanticTokenModifier>) -> Option<u32> {
+if token_types.is_empty() {
+return None;
+}
+
+let mut modifier = None;
+for token_type in token_types {
+if let Some(index) = SEMANTIC_TOKEN_MODIFIERS
 .iter()
 .position(|x| *x == token_type)
 .map(|y| y as u32)
+{
+modifier = match modifier {
+Some(modifier) => Some(modifier | index),
+None => Some(index),
+};
+}
+}
+modifier
 }

 async fn completions_get_variables_from_ast(&self, file_name: &str) -> Vec<CompletionItem> {
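get_semantic_token_modifier_index now takes a whole list of modifiers and folds the legend position of each one into a single u32 with bitwise OR, returning None when nothing matches. A self-contained mirror of that combination over a stand-in string legend:

```rust
// Self-contained mirror of the new modifier lookup above: take several
// modifiers, find each one's position in the advertised legend, and OR the
// positions together into the single u32 carried by the token.
const SEMANTIC_TOKEN_MODIFIERS: &[&str] = &["declaration", "definition", "defaultLibrary", "readonly", "static"];

fn get_semantic_token_modifier_index(token_types: Vec<&str>) -> Option<u32> {
    if token_types.is_empty() {
        return None;
    }

    let mut modifier = None;
    for token_type in token_types {
        if let Some(index) = SEMANTIC_TOKEN_MODIFIERS
            .iter()
            .position(|x| *x == token_type)
            .map(|y| y as u32)
        {
            modifier = match modifier {
                Some(m) => Some(m | index),
                None => Some(index),
            };
        }
    }
    modifier
}

fn main() {
    // "declaration" is position 0 and "readonly" is position 3, so the
    // combined value for a readonly variable declaration is 0 | 3 == 3.
    assert_eq!(get_semantic_token_modifier_index(vec!["declaration", "readonly"]), Some(3));
    assert_eq!(get_semantic_token_modifier_index(vec![]), None);
}
```

This combined value is the kind of number the modifiers test further below compares token_modifiers_bitset against.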
@@ -109,6 +109,7 @@ async fn copilot_lsp_server() -> Result<crate::lsp::copilot::Backend> {
 telemetry: Default::default(),
 is_initialized: Default::default(),
 current_handle: Default::default(),
+diagnostics_map: Default::default(),
 });
 let server = service.inner();

@@ -1086,6 +1087,45 @@ async fn test_kcl_lsp_semantic_tokens() {
 }
 }

+#[tokio::test(flavor = "multi_thread")]
+async fn test_kcl_lsp_semantic_tokens_large_file() {
+let server = kcl_lsp_server(false).await.unwrap();
+let code = include_str!("../../../tests/executor/inputs/global-tags.kcl");
+
+// Send open file.
+server
+.did_open(tower_lsp::lsp_types::DidOpenTextDocumentParams {
+text_document: tower_lsp::lsp_types::TextDocumentItem {
+uri: "file:///test.kcl".try_into().unwrap(),
+language_id: "kcl".to_string(),
+version: 1,
+text: code.to_string(),
+},
+})
+.await;
+server.wait_on_handle().await;
+
+// Send semantic tokens request.
+let semantic_tokens = server
+.semantic_tokens_full(tower_lsp::lsp_types::SemanticTokensParams {
+text_document: tower_lsp::lsp_types::TextDocumentIdentifier {
+uri: "file:///test.kcl".try_into().unwrap(),
+},
+partial_result_params: Default::default(),
+work_done_progress_params: Default::default(),
+})
+.await
+.unwrap()
+.unwrap();
+
+// Check the semantic tokens.
+if let tower_lsp::lsp_types::SemanticTokensResult::Tokens(semantic_tokens) = semantic_tokens {
+assert!(!semantic_tokens.data.is_empty());
+} else {
+panic!("Expected semantic tokens");
+}
+}
+
 #[tokio::test(flavor = "multi_thread")]
 async fn test_kcl_lsp_semantic_tokens_with_modifiers() {
 let server = kcl_lsp_server(false).await.unwrap();
@@ -1155,35 +1195,49 @@ fn myFn = (param1) => {
 // Check the semantic tokens.
 if let tower_lsp::lsp_types::SemanticTokensResult::Tokens(semantic_tokens) = semantic_tokens {
 let function_index = server
-.get_semantic_token_type_index(SemanticTokenType::FUNCTION)
+.get_semantic_token_type_index(&SemanticTokenType::FUNCTION)
 .unwrap();
 let property_index = server
-.get_semantic_token_type_index(SemanticTokenType::PROPERTY)
+.get_semantic_token_type_index(&SemanticTokenType::PROPERTY)
 .unwrap();
 let parameter_index = server
-.get_semantic_token_type_index(SemanticTokenType::PARAMETER)
+.get_semantic_token_type_index(&SemanticTokenType::PARAMETER)
 .unwrap();
 let variable_index = server
-.get_semantic_token_type_index(SemanticTokenType::VARIABLE)
+.get_semantic_token_type_index(&SemanticTokenType::VARIABLE)
 .unwrap();

 let declaration_index = server
-.get_semantic_token_modifier_index(SemanticTokenModifier::DECLARATION)
+.get_semantic_token_modifier_index(vec![SemanticTokenModifier::DECLARATION])
 .unwrap();
 let definition_index = server
-.get_semantic_token_modifier_index(SemanticTokenModifier::DEFINITION)
+.get_semantic_token_modifier_index(vec![SemanticTokenModifier::DEFINITION])
 .unwrap();
+let default_library_index = server
+.get_semantic_token_modifier_index(vec![SemanticTokenModifier::DEFAULT_LIBRARY])
+.unwrap();
+
+let variable_modifiers = server
+.get_semantic_token_modifier_index(vec![
+SemanticTokenModifier::DECLARATION,
+SemanticTokenModifier::READONLY,
+])
+.unwrap();
+let tag_modifiers = server
+.get_semantic_token_modifier_index(vec![SemanticTokenModifier::DEFINITION, SemanticTokenModifier::STATIC])
+.unwrap();

 // Iterate over the tokens and check the token types.
-let mut found_definition = false;
 let mut found_parameter = false;
 let mut found_property = false;
 let mut found_function_declaration = false;
 let mut found_variable_declaration = false;
 let mut found_property_declaration = false;
+let mut found_tag_declaration = false;
+let mut found_default_library = false;
 for token in semantic_tokens.data {
-if token.token_modifiers_bitset == definition_index {
-found_definition = true;
+if token.token_type == function_index && token.token_modifiers_bitset == default_library_index {
+found_default_library = true;
 }

 if token.token_type == parameter_index {
@@ -1192,11 +1246,15 @@ fn myFn = (param1) => {
 found_property = true;
 }

-if token.token_type == function_index && token.token_modifiers_bitset == declaration_index {
+if token.token_type == definition_index && token.token_modifiers_bitset == tag_modifiers {
+found_tag_declaration = true;
+}
+
+if token.token_type == function_index && token.token_modifiers_bitset == variable_modifiers {
 found_function_declaration = true;
 }

-if token.token_type == variable_index && token.token_modifiers_bitset == declaration_index {
+if token.token_type == variable_index && token.token_modifiers_bitset == variable_modifiers {
 found_variable_declaration = true;
 }

@@ -1204,21 +1262,18 @@ fn myFn = (param1) => {
 found_property_declaration = true;
 }

-if found_definition
-&& found_parameter
+if found_parameter
 && found_property
 && found_function_declaration
 && found_variable_declaration
 && found_property_declaration
+&& found_tag_declaration
+&& found_default_library
 {
 break;
 }
 }

-if !found_definition {
-panic!("Expected definition token");
-}
-
 if !found_parameter {
 panic!("Expected parameter token");
 }
@@ -1238,6 +1293,14 @@ fn myFn = (param1) => {
 if !found_property_declaration {
 panic!("Expected property declaration token");
 }
+
+if !found_tag_declaration {
+panic!("Expected tag declaration token");
+}
+
+if !found_default_library {
+panic!("Expected default library token");
+}
 } else {
 panic!("Expected semantic tokens");
 }
|
||||
is_mock,
|
||||
};
|
||||
|
||||
let memory = ctx.run(program, Some(memory)).await.map_err(String::from)?;
|
||||
let memory = ctx.run(&program, Some(memory)).await.map_err(String::from)?;
|
||||
// The serde-wasm-bindgen does not work here because of weird HashMap issues so we use the
|
||||
// gloo-serialize crate instead.
|
||||
JsValue::from_serde(&memory).map_err(|e| e.to_string())
|
||||
@ -357,6 +357,7 @@ pub async fn copilot_lsp_run(config: ServerConfig, token: String, baseurl: Strin
|
||||
|
||||
is_initialized: Default::default(),
|
||||
current_handle: Default::default(),
|
||||
diagnostics_map: Default::default(),
|
||||
})
|
||||
.custom_method("copilot/setEditorInfo", kcl_lib::lsp::copilot::Backend::set_editor_info)
|
||||
.custom_method(
|
||||
|
@ -51,7 +51,7 @@ async fn execute_and_snapshot(code: &str, units: UnitLength) -> Result<image::Dy
|
||||
let parser = kcl_lib::parser::Parser::new(tokens);
|
||||
let program = parser.ast()?;
|
||||
|
||||
let snapshot = ctx.execute_and_prepare_snapshot(program).await?;
|
||||
let snapshot = ctx.execute_and_prepare_snapshot(&program).await?;
|
||||
|
||||
// Create a temporary file to write the output to.
|
||||
let output_file = std::env::temp_dir().join(format!("kcl_output_{}.png", uuid::Uuid::new_v4()));
|
||||
|
@ -35,7 +35,7 @@ async fn setup(code: &str, name: &str) -> Result<(ExecutorContext, Program, uuid
|
||||
let parser = kcl_lib::parser::Parser::new(tokens);
|
||||
let program = parser.ast()?;
|
||||
let ctx = kcl_lib::executor::ExecutorContext::new(&client, Default::default()).await?;
|
||||
let memory = ctx.run(program.clone(), None).await?;
|
||||
let memory = ctx.run(&program, None).await?;
|
||||
|
||||
// We need to get the sketch ID.
|
||||
// Get the sketch group ID from memory.
|
||||
|